diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 7bca37f8f..86db5f5e5 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,8 +8,7 @@ updates: interval: "weekly" assignees: - "xylar" - - "altheaden" reviewers: - "xylar" - - "altheaden" + - "andrewdnolan" diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index b62231bc3..138431ba8 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -27,7 +27,7 @@ jobs: shell: bash -l {0} strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13"] fail-fast: false steps: - id: skip_check @@ -37,7 +37,7 @@ jobs: paths_ignore: ${{ env.PATHS_IGNORE }} - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} - uses: actions/checkout@v4 + uses: actions/checkout@v5 - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Cache Conda @@ -53,21 +53,22 @@ jobs: - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Set up Conda Environment - uses: conda-incubator/setup-miniconda@v3 + uses: mamba-org/setup-micromamba@v2 with: - activate-environment: "mpas_analysis_ci" - miniforge-version: latest - channels: conda-forge - channel-priority: strict - auto-update-conda: false - python-version: ${{ matrix.python-version }} + environment-name: mpas_analysis_dev + init-shell: bash + condarc: | + channel_priority: strict + channels: + - conda-forge + create-args: >- + python=${{ matrix.python-version }} - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Install mpas_analysis run: | - conda create -n mpas_analysis_dev --file dev-spec.txt \ - python=${{ matrix.python-version }} - conda activate mpas_analysis_dev + conda install -y --file dev-spec.txt \ + python=${{ matrix.python-version }} python -m pip install --no-deps --no-build-isolation -vv -e . 
- if: ${{ steps.skip_check.outputs.should_skip != 'true' }} @@ -76,7 +77,6 @@ jobs: CHECK_IMAGES: False run: | set -e - conda activate mpas_analysis_dev pip check pytest --pyargs mpas_analysis mpas_analysis --help @@ -85,9 +85,5 @@ jobs: - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Build Sphinx Docs run: | - conda activate mpas_analysis_dev - # sphinx-multiversion expects at least a "main" branch - git branch main || echo "branch main already exists." cd docs - sphinx-multiversion . _build/html - + DOCS_VERSION=test make versioned-html diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index 832419107..b90bb7882 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -20,7 +20,7 @@ jobs: shell: bash -l {0} timeout-minutes: 20 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: persist-credentials: false fetch-depth: 0 @@ -37,52 +37,60 @@ jobs: - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Set up Conda Environment - uses: conda-incubator/setup-miniconda@v3 + uses: mamba-org/setup-micromamba@v2 with: - activate-environment: "mpas_analysis_ci" - miniforge-version: latest - channels: conda-forge - channel-priority: strict - auto-update-conda: false - python-version: ${{ env.PYTHON_VERSION }} + environment-name: mpas_analysis_dev + init-shell: bash + condarc: | + channel_priority: strict + channels: + - conda-forge + create-args: >- + python=${{ env.PYTHON_VERSION }} - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Install mpas_analysis run: | git config --global url."https://github.com/".insteadOf "git@github.com:" - conda create -n mpas_analysis_dev --file dev-spec.txt \ + conda install -y --file dev-spec.txt \ python=${{ env.PYTHON_VERSION }} - conda activate mpas_analysis_dev python -m pip install -vv --no-deps --no-build-isolation -e . 
- name: Build Sphinx Docs run: | set -e - conda activate mpas_analysis_dev pip check mpas_analysis sync diags --help cd docs - sphinx-multiversion . _build/html + DOCS_VERSION=${{ github.ref_name }} make versioned-html - name: Copy Docs and Commit run: | set -e - conda activate mpas_analysis_dev pip check mpas_analysis sync diags --help cd docs # gh-pages branch must already exist git clone https://github.com/MPAS-Dev/MPAS-Analysis.git --branch gh-pages --single-branch gh-pages + + # Only replace docs in a directory with the destination branch name with latest changes. Docs for + # releases should be untouched. + rm -rf gh-pages/${{ github.ref_name }} + + # don't clobber existing release versions (in case we retroactively fixed them) + cp -r _build/html/${{ github.ref_name }} gh-pages/ + + mkdir -p gh-pages/shared + cp shared/version-switcher.js gh-pages/shared/version-switcher.js + + # Update the list of versions with all versions in the gh-pages directory. + python generate_versions_json.py + # Make sure we're in the gh-pages directory. cd gh-pages # Create `.nojekyll` (if it doesn't already exist) for proper GH Pages configuration. touch .nojekyll # Add `index.html` to point to the `develop` branch automatically. printf '' > index.html - # Only replace docs in a directory with the destination branch name with latest changes. Docs for - # releases should be untouched. - rm -rf ${{ github.head_ref || github.ref_name }} - # don't clobber existing release versions (in case we retroactively fixed them) - cp -r -n ../_build/html/* . # Configure git using GitHub Actions credentials. 
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" git config --local user.name "github-actions[bot]" diff --git a/.gitignore b/.gitignore index e359827db..3512ba6ed 100644 --- a/.gitignore +++ b/.gitignore @@ -99,3 +99,6 @@ ENV/ /chrysalis_test_suite/ /cori_test_suite/ /compy_test_suite/ + +# vscode settings +.vscode/ \ No newline at end of file diff --git a/README.md b/README.md index 7c7ce60de..a9a9281c1 100644 --- a/README.md +++ b/README.md @@ -54,8 +54,8 @@ environment): ``` bash conda config --add channels conda-forge conda config --set channel_priority strict -conda create -y -n mpas_dev --file dev-spec.txt -conda activate mpas_dev +conda create -y -n mpas_analysis_dev --file dev-spec.txt +conda activate mpas_analysis_dev python -m pip install --no-deps --no-build-isolation -e . ``` @@ -64,16 +64,16 @@ for MPAS-Tools or geometric\_features), you should first comment out the other package in `dev-spec.txt`. Then, you can install both packages in the same development environment, e.g.: ``` bash -conda create -y -n mpas_dev --file tools/MPAS-Tools/conda_package/dev-spec.txt \ +conda create -y -n mpas_analysis_dev --file tools/MPAS-Tools/conda_package/dev-spec.txt \ --file analysis/MPAS-Analysis/dev-spec.txt -conda activate mpas_dev +conda activate mpas_analysis_dev cd tools/MPAS-Tools/conda_package python -m pip install --no-deps --no-build-isolation -e . cd ../../../analysis/MPAS-Analysis python -m pip install --no-deps --no-build-isolation -e . ``` Obviously, the paths to the repos may be different in your local clones. With -the `mpas_dev` environment as defined above, you can make changes to both +the `mpas_analysis_dev` environment as defined above, you can make changes to both `mpas_tools` and `mpas-analysis` packages in their respective branches, and these changes will be reflected when refer to the packages or call their respective entry points (command-line tools). @@ -294,8 +294,7 @@ developers". 
Then run: To generate the `sphinx` documentation, run: ``` cd docs -make clean -make html +DOCS_VERSION=test make clean versioned-html ``` The results can be viewed in your web browser by opening: ``` diff --git a/ci/python3.9.yaml b/ci/python3.9.yaml deleted file mode 100644 index 7929b1920..000000000 --- a/ci/python3.9.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channel_sources: -- conda-forge,defaults -pin_run_as_build: - python: - min_pin: x.x - max_pin: x.x -python: -- 3.9.* *_cpython diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml index 570f56586..10009615a 100644 --- a/ci/recipe/meta.yaml +++ b/ci/recipe/meta.yaml @@ -1,5 +1,6 @@ {% set name = "MPAS-Analysis" %} -{% set version = "1.13.0" %} +{% set version = "1.14.0" %} +{% set python_min = "3.10" %} package: name: {{ name|lower }} @@ -9,30 +10,32 @@ source: path: ../.. build: - number: 0 - script: {{ PYTHON }} -m pip install --no-deps --no-build-isolation -vv . - noarch: python + number: 0 + script: {{ PYTHON }} -m pip install . 
--no-deps --no-build-isolation -vv + noarch: python + entry_points: + - mpas_analysis = mpas_analysis.__main__:main + - download_analysis_data = mpas_analysis.download_data:download_analysis_data requirements: host: - - python >=3.9 + - python {{ python_min }} - pip - - setuptools + - setuptools >=60 run: - - python >=3.9 + - python >={{ python_min }},<3.13 - cartopy >=0.18.0 - cartopy_offlinedata - cmocean - dask - esmf >=8.4.2,<9.0.0 - - esmf=*=mpi_mpich_* - f90nml - geometric_features >=1.6.1 - gsw - lxml - mache >=1.11.0 - matplotlib-base >=3.9.0 - - mpas_tools >=0.34.1,<1.0.0 + - mpas_tools >=1.3.0,<2.0.0 - nco >=4.8.1,!=5.2.6 - netcdf4 - numpy >=2.0,<3.0 @@ -40,18 +43,19 @@ requirements: - pillow >=10.0.0,<11.0.0 - progressbar2 - pyproj - - pyremap >=1.2.0,<2.0.0 + - pyremap >=2.0.0,<3.0.0 - python-dateutil - requests - scipy >=1.7.0 - - setuptools - shapely >=2.0,<3.0 + - tranche >=0.2.3 - xarray >=0.14.1 test: requires: - pytest - pip + - python {{ python_min }} imports: - mpas_analysis - pytest @@ -78,5 +82,6 @@ about: extra: recipe-maintainers: + - andrewdnolan - xylar - jhkennedy diff --git a/configs/alcf/job_script.cooley.bash b/configs/alcf/job_script.cooley.bash index c5eda7b05..8a2ea6614 100755 --- a/configs/alcf/job_script.cooley.bash +++ b/configs/alcf/job_script.cooley.bash @@ -6,7 +6,7 @@ source /lus/theta-fs0/projects/ccsm/acme/tools/e3sm-unified/load_latest_e3sm_unified_cooley.sh # alternatively, you can load your own development environment # source ~/mambaforge/etc/profile.d/conda.sh -# conda activate mpas_dev +# conda activate mpas_analysis_dev # export E3SMU_MACHINE=cooley export HDF5_USE_FILE_LOCKING=FALSE diff --git a/configs/compy/job_script.compy.bash b/configs/compy/job_script.compy.bash index 33b153c68..916a6f63f 100644 --- a/configs/compy/job_script.compy.bash +++ b/configs/compy/job_script.compy.bash @@ -11,7 +11,7 @@ export OMP_NUM_THREADS=1 source /share/apps/E3SM/conda_envs/load_latest_e3sm_unified_compy.sh # alternatively, you 
can load your own development environment # source ~/mambaforge/etc/profile.d/conda.sh -# conda activate mpas_dev +# conda activate mpas_analysis_dev # export E3SMU_MACHINE=compy export HDF5_USE_FILE_LOCKING=FALSE diff --git a/configs/job_script.default.bash b/configs/job_script.default.bash index 29871522c..e1d61118f 100755 --- a/configs/job_script.default.bash +++ b/configs/job_script.default.bash @@ -8,7 +8,7 @@ export OMP_NUM_THREADS=1 source ~/mambaforge/etc/profile.d/conda.sh -conda activate mpas_dev +conda activate mpas_analysis_dev # if you are on an E3SM supported machine, you can specify it: # export E3SMU_MACHINE=chrysalis diff --git a/configs/lanl/job_script.lanl.bash b/configs/lanl/job_script.lanl.bash index 36b861410..b32399a07 100644 --- a/configs/lanl/job_script.lanl.bash +++ b/configs/lanl/job_script.lanl.bash @@ -8,7 +8,7 @@ source /users/xylar/climate/mambaforge/etc/profile.d/conda.sh source /users/xylar/climate/mambaforge/etc/profile.d/mamba.sh -mamba activate mpas_dev +mamba activate mpas_analysis_dev export HDF5_USE_FILE_LOCKING=FALSE diff --git a/configs/lcrc/job_script.anvil.bash b/configs/lcrc/job_script.anvil.bash index ef20fa541..8765a2f4a 100644 --- a/configs/lcrc/job_script.anvil.bash +++ b/configs/lcrc/job_script.anvil.bash @@ -12,7 +12,7 @@ export OMP_NUM_THREADS=1 source /lcrc/soft/climate/e3sm-unified/load_latest_e3sm_unified_anvil.sh # alternatively, you can load your own development environment # source ~/mambaforge/etc/profile.d/conda.sh -# conda activate mpas_dev +# conda activate mpas_analysis_dev # export E3SMU_MACHINE=anvil export HDF5_USE_FILE_LOCKING=FALSE diff --git a/configs/lcrc/job_script.chrysalis.bash b/configs/lcrc/job_script.chrysalis.bash index 598115df5..ab2882b78 100644 --- a/configs/lcrc/job_script.chrysalis.bash +++ b/configs/lcrc/job_script.chrysalis.bash @@ -10,7 +10,7 @@ export OMP_NUM_THREADS=1 source /lcrc/soft/climate/e3sm-unified/load_latest_e3sm_unified_chrysalis.sh # alternatively, you can load your 
own development environment # source ~/mambaforge/etc/profile.d/conda.sh -# conda activate mpas_dev +# conda activate mpas_analysis_dev # export E3SMU_MACHINE=chrysalis export HDF5_USE_FILE_LOCKING=FALSE diff --git a/configs/nersc/job_script.cori-haswell.bash b/configs/nersc/job_script.cori-haswell.bash index 75140ba57..210681e40 100644 --- a/configs/nersc/job_script.cori-haswell.bash +++ b/configs/nersc/job_script.cori-haswell.bash @@ -20,7 +20,7 @@ export OMP_NUM_THREADS=1 source /global/common/software/e3sm/anaconda_envs/load_latest_e3sm_unified_cori-haswell.sh # alternatively, you can load your own development environment # source ~/mambaforge/etc/profile.d/conda.sh -# conda activate mpas_dev +# conda activate mpas_analysis_dev # export E3SMU_MACHINE=cori-haswell export HDF5_USE_FILE_LOCKING=FALSE diff --git a/configs/nersc/job_script.cori-knl.bash b/configs/nersc/job_script.cori-knl.bash index 679157bb1..2eb57758d 100644 --- a/configs/nersc/job_script.cori-knl.bash +++ b/configs/nersc/job_script.cori-knl.bash @@ -20,7 +20,7 @@ export OMP_NUM_THREADS=1 source /global/common/software/e3sm/anaconda_envs/load_latest_e3sm_unified_cori-knl.sh # alternatively, you can load your own development environment # source ~/mambaforge/etc/profile.d/conda.sh -# conda activate mpas_dev +# conda activate mpas_analysis_dev # export E3SMU_MACHINE=cori-knl export HDF5_USE_FILE_LOCKING=FALSE diff --git a/configs/nersc/job_script.pm-cpu.bash b/configs/nersc/job_script.pm-cpu.bash index f096ca4b0..d4abe4969 100644 --- a/configs/nersc/job_script.pm-cpu.bash +++ b/configs/nersc/job_script.pm-cpu.bash @@ -14,7 +14,7 @@ export OMP_NUM_THREADS=1 source /global/common/software/e3sm/anaconda_envs/load_latest_e3sm_unified_pm-cpu.sh # alternatively, you can load your own development environment # source ~/mambaforge/etc/profile.d/conda.sh -# conda activate mpas_dev +# conda activate mpas_analysis_dev # export E3SMU_MACHINE=pm-cpu export HDF5_USE_FILE_LOCKING=FALSE diff --git 
a/configs/olcf/job_script.olcf.bash b/configs/olcf/job_script.olcf.bash index 431d1bebc..f777f74a4 100644 --- a/configs/olcf/job_script.olcf.bash +++ b/configs/olcf/job_script.olcf.bash @@ -10,7 +10,7 @@ source /gpfs/alpine/proj-shared/cli115/e3sm-unified/load_latest_e3sm_unified_andes.csh # alternatively, you can load your own development environment # source ~/mambaforge/etc/profile.d/conda.sh -# conda activate mpas_dev +# conda activate mpas_analysis_dev # export E3SMU_MACHINE=anvil export HDF5_USE_FILE_LOCKING=FALSE diff --git a/dev-spec.txt b/dev-spec.txt index dfeb22413..56b63b9bf 100644 --- a/dev-spec.txt +++ b/dev-spec.txt @@ -2,7 +2,7 @@ # $ conda create --name --file # Base -python>=3.9 +python >=3.10 cartopy >=0.18.0 cartopy_offlinedata cmocean @@ -10,38 +10,36 @@ dask esmf >=8.4.2,<9.0.0 esmf=*=mpi_mpich_* f90nml -geometric_features>=1.6.1 +geometric_features >=1.6.1 gsw lxml mache >=1.11.0 -matplotlib-base>=3.9.0 -mpas_tools>=0.34.1,<1.0.0 -nco>=4.8.1,!=5.2.6 +matplotlib-base >=3.9.0 +mpas_tools >=1.3.0,<2.0.0 +nco >=4.8.1,!=5.2.6 netcdf4 -numpy>=2.0,<3.0 +numpy >=2.0,<3.0 pandas pillow >=10.0.0,<11.0.0 progressbar2 pyproj -pyremap>=1.2.0,<2.0.0 +pyremap >=2.0.0,<3.0.0 python-dateutil requests scipy >=1.7.0 -setuptools -shapely>=2.0,<3.0 -xarray>=0.14.1 +shapely >=2.0,<3.0 +tranche >=0.2.3 +xarray >=0.14.1 # Development -flake8 -git pip pytest +setuptools >=60 # Documentation mock -m2r2>=0.3.3 -mistune<2 +m2r2 >=0.3.3 +mistune <2 sphinx sphinx_rtd_theme -sphinx-multiversion tabulate diff --git a/docs/.gitignore b/docs/.gitignore index 6290f34f4..8fd093281 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -14,4 +14,4 @@ design_docs/remapper.rst design_docs/template.rst design_docs/timekeeping_reorg.rst design_docs/variable_mapping_reorg.rst -quick_start.rst +users_guide/quick_start.rst diff --git a/docs/Makefile b/docs/Makefile index 82663109b..6cf495011 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -8,6 +8,23 @@ SPHINXPROJ = MPAS-Analysis 
SOURCEDIR = . BUILDDIR = _build +# Build into a versioned subdirectory +versioned-html: + @echo "Building version: $(DOCS_VERSION)" + $(SPHINXBUILD) -b html "$(SOURCEDIR)" "$(BUILDDIR)/html/$(DOCS_VERSION)" + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html/$(DOCS_VERSION)." + @echo "Setting up shared version switcher for local preview..." + mkdir -p _build/html/shared + cp shared/version-switcher.js _build/html/shared/version-switcher.js + python generate_versions_json.py --local + +# Override html target to include local setup +html: + $(SPHINXBUILD) -b html "$(SOURCEDIR)" "$(BUILDDIR)/html" + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + + # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) @@ -16,6 +33,10 @@ clean: rm -rf users_guide/*obs_table.rst developers_guide/generated users_guide/obs @$(SPHINXBUILD) -M clean "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) +clean-versioned-html: + rm -rf $(BUILDDIR)/html/* + @echo "Cleaned versioned HTML builds." 
+ .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new diff --git a/docs/_static/style.css b/docs/_static/style.css index 6cbfde333..22d7aa0a4 100644 --- a/docs/_static/style.css +++ b/docs/_static/style.css @@ -2,3 +2,29 @@ max-width: 1200px !important; } +#version-switcher select { + background-color: #2980b9; + color: white; + border: none; + border-radius: 4px; + padding: 4px 30px 4px 10px; + font-size: 0.9em; + appearance: none; /* Remove default dropdown arrow */ + background-image: url("data:image/svg+xml;charset=UTF-8,%3Csvg fill='white' height='10' viewBox='0 0 24 24' width='10' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M7 10l5 5 5-5z'/%3E%3C/svg%3E"); + background-repeat: no-repeat; + background-position: right 10px center; + background-size: 12px; + } + + #version-switcher select:focus { + outline: none; + box-shadow: 0 0 0 2px rgba(255, 255, 255, 0.4); + background-color: #2c89c4; /* slightly lighter blue on focus */ + } + + /* Selected item in the dropdown menu */ + #version-switcher option:checked { + background-color: #dddddd; /* for selected */ + color: black; + } + diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html index efc29758f..d430c7ad6 100644 --- a/docs/_templates/layout.html +++ b/docs/_templates/layout.html @@ -3,3 +3,26 @@ {% endblock %} + +{% block footer %} + {{ super() }} + + + + + + + + + + +{% endblock %} + diff --git a/docs/_templates/versions.html b/docs/_templates/versions.html deleted file mode 100644 index 625a9a384..000000000 --- a/docs/_templates/versions.html +++ /dev/null @@ -1,28 +0,0 @@ -{%- if current_version %} -
- - Other Versions - v: {{ current_version.name }} - - -
- {%- if versions.tags %} -
-
Tags
- {%- for item in versions.tags %} -
{{ item.name }}
- {%- endfor %} -
- {%- endif %} - {%- if versions.branches %} -
-
Branches
- {%- for item in versions.branches %} -
{{ item.name }}
- {%- endfor %} -
- {%- endif %} -
-
-{%- endif %} - diff --git a/docs/conf.py b/docs/conf.py index 3e1a4841e..607ca70ed 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -37,7 +37,6 @@ # ones. extensions = [ 'sphinx_rtd_theme', - 'sphinx_multiversion', 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.intersphinx', @@ -222,14 +221,6 @@ # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] -html_sidebars = { - "**": [ - "versions.html", - ], +html_context = { + "current_version": os.getenv("DOCS_VERSION", "main"), } - -# -- Options sphinx-multiversion ------------------------------------------- -# Include tags like "tags/1.0.0" -- 1.7.2 doesn't build -smv_tag_whitelist = r'^(?!1.7.2)\d+\.\d+.\d+$' -smv_branch_whitelist = r'^(develop|main)$' -smv_remote_whitelist = 'origin' diff --git a/docs/developers_guide/api.rst b/docs/developers_guide/api.rst index c6cb11fd6..56a2e2631 100644 --- a/docs/developers_guide/api.rst +++ b/docs/developers_guide/api.rst @@ -77,6 +77,9 @@ Ocean tasks ClimatologyMapArgoTemperature ClimatologyMapArgoSalinity ClimatologyMapWaves + ClimatologyMapCustom + ClimatologyMapWindStressCurl + GeojsonNetcdfTransects IndexNino34 MeridionalHeatTransport OceanHistogram @@ -130,6 +133,8 @@ Ocean utilities add_standard_regions_and_subset get_standard_region_names compute_zmid + compute_zinterface + vector_cell_to_edge_isotropic Sea ice tasks diff --git a/docs/developers_guide/docs.rst b/docs/developers_guide/docs.rst new file mode 100644 index 000000000..f211aee48 --- /dev/null +++ b/docs/developers_guide/docs.rst @@ -0,0 +1,32 @@ +Building the Documentation +========================== + +With the ``mpas_analysis_dev`` environment activated, you can run: + +.. code-block:: bash + + cd docs + DOCS_VERSION=test make clean versioned-html + +to build the docs locally in the ``_build/html`` subdirectory. + +The docs should build cleanly. 
If they don't, please attempt to fix the +errors and warnings even if they are not related to your changes. We want +to keep the documentation in good shape. + +Previewing the Documentation +---------------------------- + +When generating documentation on HPC machines, you will want to copy the html +output to the public web space to view it, or if the web portal is being +cranky, scp it to your local machine. + +To preview the documentation locally, open the ``index.html`` file in the +``_build/html/test`` directory with your browser or try: + +.. code-block:: bash + + cd _build/html + python -m http.server 8000 + +Then, open http://0.0.0.0:8000/test/ in your browser. diff --git a/docs/developers_guide/quick_start.rst b/docs/developers_guide/quick_start.rst new file mode 100644 index 000000000..8adf5cd14 --- /dev/null +++ b/docs/developers_guide/quick_start.rst @@ -0,0 +1,130 @@ +Quick Start for Developers +========================== + +This guide provides a condensed overview for developers to get started with +MPAS-Analysis development. + +1. Fork and Clone the Repository +-------------------------------- +- Fork `MPAS-Analysis `_ on GitHub. +- Clone the main repo and your fork locally: + - Create a base directory (e.g., ``mpas-analysis``). + - Clone the main repo: + + .. code-block:: bash + + git clone git@github.com:MPAS-Dev/MPAS-Analysis.git develop + + - Add your fork as a remote: + + .. code-block:: bash + + git remote add /MPAS-Analysis git@github.com:/MPAS-Analysis.git + +2. Configure Git +---------------- +- Set up your ``~/.gitconfig`` with your name and email (must match your + GitHub account). +- Recommended: set editor, color, and useful aliases. + +3. Set Up SSH Keys +------------------ +- Add SSH keys to GitHub for push access. +- See: https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account + +4. 
Create a Development Worktree +-------------------------------- +- Fetch latest changes: + + .. code-block:: bash + + git fetch --all -p + +- Create a worktree for your feature branch: + + .. code-block:: bash + + git worktree add ../ + +- Enter the worktree directory: + + .. code-block:: bash + + cd ../ + +5. Set Up Conda Environment +--------------------------- +- Install Miniforge3 (recommended) or Miniconda. +- For Miniconda, add ``conda-forge`` channel and set strict priority. +- Create environment: + + .. code-block:: bash + + conda create -y -n mpas_analysis_dev --file dev-spec.txt + +- Activate: + + .. code-block:: bash + + conda activate mpas_analysis_dev + +- Install MPAS-Analysis in edit mode: + + .. code-block:: bash + + python -m pip install --no-deps --no-build-isolation -e . + +6. Activate Environment (each session) +-------------------------------------- +- For bash: + + .. code-block:: bash + + source ~/miniforge3/etc/profile.d/conda.sh; conda activate mpas_analysis_dev + +- For csh: + + .. code-block:: csh + + source ~/miniforge3/etc/profile.d/conda.csh; conda activate mpas_analysis_dev + +7. Configure and Run MPAS-Analysis +---------------------------------- +- Copy and edit a config file (e.g., ``example_e3sm.cfg``) for your run. +- Set required options: ``mainRunName``, ``baseDirectory``, ``mpasMeshName``, output paths. +- Set ``mapMpiTasks = 1`` and ``mapParallelExec = None`` for development environments. +- Export HDF5 file locking variable if needed: + - Bash: + + .. code-block:: bash + + export HDF5_USE_FILE_LOCKING=FALSE + + - Csh: + + .. code-block:: csh + + setenv HDF5_USE_FILE_LOCKING FALSE + +- Run analysis: + + .. code-block:: bash + + mpas_analysis -m .cfg + +8. View Results +--------------- +- Output is a set of web pages in your specified output directory. +- On some systems, update permissions: + + .. code-block:: bash + + chmod -R ugo+rX + +- See the main web page for links to results and provenance info. 
+ +Additional Recommendations +-------------------------- +- Use VS Code for remote editing and linting (optional). + +For more details, see the full :doc:`../tutorials/dev_getting_started`. diff --git a/docs/developers_guide/releasing.rst b/docs/developers_guide/releasing.rst new file mode 100644 index 000000000..4714cbde4 --- /dev/null +++ b/docs/developers_guide/releasing.rst @@ -0,0 +1,155 @@ +.. _dev_releasing: + +*********************** +Releasing a New Version +*********************** + +This document describes the steps for maintainers to tag and release a new +version of ``MPAS-Analysis``, and to update the conda-forge feedstock. + +Version Bump and Dependency Updates +=================================== + +1. **Update the Version Number** + + - Manually update the version number in the following files: + + - ``mpas_analysis/version.py`` + - ``ci/recipe/meta.yaml`` + + - Make sure the version follows `semantic versioning `_. + For release candidates, use versions like ``1.3.0rc1``. + +2. **Check and Update Dependencies** + + - Ensure that dependencies and their constraints are up-to-date and + consistent in: + + - ``ci/recipe/meta.yaml`` (for the conda-forge release) + + - ``pyproject.toml`` (for PyPI; used for sanity checks) + + - ``dev-spec.txt`` (development dependencies; should be a superset) + + - Use the GitHub "Compare" feature to check for dependency changes between + releases: + https://github.com/MPAS-Dev/MPAS-Analysis/compare + +3. **Make a PR and merge it** + + - Open a PR for the version bump and dependency changes and merge once + approved. + +Tagging and Publishing a Release Candidate +========================================== + +4. **Tagging a Release Candidate** + + - For release candidates, **do not create a GitHub release page**. Just + create a tag from the command line: + + - Make sure your changes are merged into ``develop`` or your own update + branch (e.g. ``update-to-1.3.0``) and your local repo is up to date. 
+ + - Tag the release candidate (e.g., ``1.3.0rc1``): + + :: + + git checkout develop + git fetch --all -p + git reset --hard origin/develop + git tag 1.3.0rc1 + git push origin 1.3.0rc1 + + (Replace ``1.3.0rc1`` with your actual version, and ``develop`` with + your branch if needed.) + + **Note:** This will only create a tag. No release page will be created + on GitHub. + +5. **Updating the conda-forge Feedstock for a Release Candidate** + + - The conda-forge feedstock does **not** update automatically for release + candidates. + - You must always create a PR manually, and it must target the ``dev`` + branch of the feedstock. + + Steps: + + - Download the release tarball: + + :: + + wget https://github.com/MPAS-Dev/MPAS-Analysis/archive/refs/tags/.tar.gz + + - Compute the SHA256 checksum: + + :: + + shasum -a 256 .tar.gz + + - In the ``meta.yaml`` of the feedstock recipe: + - Set ``{% set version = "" %}`` + - Set the new ``sha256`` value + - Update dependencies if needed + + - Commit, push to a new branch, and open a PR **against the ``dev`` branch** + of the feedstock: + https://github.com/conda-forge/mpas-analysis-feedstock + + - Follow any instructions in the PR template and merge once approved + +Releasing a Stable Version +========================== + +6. **Publishing a Stable Release** + + - For stable releases, create a GitHub release page as follows: + + - Go to https://github.com/MPAS-Dev/MPAS-Analysis/releases + + - Click "Draft a new release" + + - Enter a tag (e.g., ``1.3.0``) + + - Set the release title to the version prefixed with ``v`` (e.g., + ``v1.3.0``) + + - Generate or manually write release notes + + - Click "Publish release" + +7. **Updating the conda-forge Feedstock for a Stable Release** + + - Wait for the ``regro-cf-autotick-bot`` to open a PR at: + https://github.com/conda-forge/mpas-analysis-feedstock + + - This may take several hours to a day. 
+ + - Review the PR: + - Confirm the version bump and dependency changes + - Merge once CI checks pass + + **Note:** If you are impatient, you can accelerate this process by creating + a bot issue at: https://github.com/conda-forge/mpas-analysis-feedstock/issues + with the subject ``@conda-forge-admin, please update version``. This + will open a new PR with the version within a few minutes. + + - If the bot PR does not appear or is too slow, you may update manually (see + the manual steps for release candidates above, but target the ``main`` + branch of the feedstock). + +Post Release Actions +==================== + +8. **Verify and Announce** + + - Install the package in a clean environment to test: + + :: + + conda create -n test-mpas -c conda-forge mpas-analysis= + + - Optionally announce the release on relevant communication channels + + - Update any documentation or release notes as needed diff --git a/docs/developers_guide/test_suite.rst b/docs/developers_guide/test_suite.rst new file mode 100644 index 000000000..0372ff96b --- /dev/null +++ b/docs/developers_guide/test_suite.rst @@ -0,0 +1,148 @@ +Test Suite Infrastructure +========================= + +The `suite` directory provides a comprehensive infrastructure for testing +MPAS-Analysis on supported machines (Anvil, Chrysalis, Perlmutter-CPU, and +Compy). The suite is designed to ensure code changes do not introduce +unexpected results and to validate MPAS-Analysis in various environments. + +Overview of Test Scripts +------------------------ + +There are three main scripts for running the test suite: + +1. **run_dev_suite.bash** (Developer Testing) + + - Use this script after activating your development environment + (must be named `mpas_analysis_dev`). + + - It builds the documentation and runs a series of analysis tasks on output + from a low-resolution (QUwLI240) simulation. + + - Each task produces a web page with results, accessible via the web portal. + + - Example usage: + + .. 
code-block:: bash + + $ source ~/miniforge3/etc/profile.d/conda.sh + $ conda activate mpas_analysis_dev + $ ./suite/run_dev_suite.bash + + - After completion, check for successful web page generation, e.g.: + + .. code-block:: bash + + $ tail -n 3 chrysalis_test_suite/main_py3.11/mpas_analysis.o793058 + + The last lines should include: + + .. code-block:: none + + Generating webpage for viewing results... + Web page: https://web.lcrc.anl.gov/public/e3sm/diagnostic_output//analysis_testing/chrysalis//main_py3.11/ + + - To quickly identify unfinished or failed tasks: + + .. code-block:: bash + + $ grep -L "Web page:" chrysalis_test_suite/*/mpas_analysis.o* + + - Developers should run this suite manually on each pull request before + merging and link the results in the PR. + +2. **run_suite.bash** (Package Build & Test) + + - Use this script to build the MPAS-Analysis conda package and test it in + fresh environments. + + - It creates conda environments for multiple Python versions, runs tests, + builds documentation, and executes the analysis suite. + + - Recommended for more thorough validation, especially before releases. + + - Example usage: + + .. code-block:: bash + + $ ./suite/run_suite.bash + +3. **run_e3sm_unified_suite.bash** (E3SM-Unified Deployment Testing) + + - Used during test deployments of E3SM-Unified to verify MPAS-Analysis + works as expected within the deployment. + + - Typically run by E3SM-Unified maintainers during deployment testing. + + - Example usage: + + .. code-block:: bash + + $ ./suite/run_e3sm_unified_suite.bash + +Supported Machines +------------------ + +The suite is designed to run only on supported machines: + +- Anvil + +- Chrysalis + +- Perlmutter-CPU (`pm-cpu`) + +- Compy + +If you attempt to run the suite on an unsupported machine, you will receive an +error. 
+ +Modifying the Test Suite +------------------------ + +Developers may need to update the suite for new requirements: + +- **Python Versions**: + + - The Python versions tested are defined in the scripts (e.g., + `main_py=3.11`, `alt_py=3.10`). + + - To test additional versions, add them to the relevant script variables and + loops. + +- **Adding New Machines**: + + - Update the machine detection logic in `suite/setup.py` and add appropriate + input/output paths for the new machine. + + - Ensure the new machine is supported in the scripts and the web portal + configuration. + +- **Adding/Modifying Tests**: + + - To add new tests, update the list of runs in the scripts and + provide corresponding config files in the `suite` directory. + + - New tests could change which analysis tasks are run, the configuration for + running tasks overall (e.g. how climatologies are computed), or how + individual tasks are configured (e.g. focused on polar regions vs. global) + +- **Changing Simulation Data**: + + - Update the simulation name and mesh in `suite/setup.py` if you wish to + test on different output. + +Best Practices +-------------- + +- Always run the test suite before merging a pull request. + +- Link the results web page in your PR for reviewers. + +- Use the quick check (`grep -L "Web page:" ...`) to ensure all tasks + completed successfully. + +- Update the suite scripts and configs as needed to keep pace with + MPAS-Analysis development. + +For more details, see the comments and documentation within each script and +config file in the `suite` directory. 
diff --git a/docs/generate_versions_json.py b/docs/generate_versions_json.py new file mode 100644 index 000000000..1ab13280e --- /dev/null +++ b/docs/generate_versions_json.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +import argparse +import json +import os +import re + + +def version_key(name): + """Key function for sorting versions.""" + match = re.match(r'^(\d+)\.(\d+)\.(\d+)$', name) + if match: + # Sort by major, minor, patch + return tuple(map(int, match.groups())) + return () + + +# Mode: local or production +parser = argparse.ArgumentParser( + description='Generate versions.json for MPAS Analysis documentation.') +parser.add_argument( + '--local', + action='store_true', + help='Generate versions.json for local build.' +) +args = parser.parse_args() +local = args.local +base_dir = '_build/html' if local else 'gh-pages' +shared_dir = os.path.join(base_dir, 'shared') + +entries = [] + +if not os.path.exists(base_dir) or not os.listdir(base_dir): + raise FileNotFoundError( + f"Base directory '{base_dir}' does not exist or is empty.") + +versions = os.listdir(base_dir) +numeric_versions = [] +non_numeric_versions = [] + +for version in versions: + # Check if it matches version pattern + if re.match(r'^\d+\.\d+\.\d+$', version): + numeric_versions.append(version) + else: + non_numeric_versions.append(version) + +# Sort numeric versions by major, minor, patch in descending order +numeric_versions.sort(key=version_key, reverse=True) +# Sort non-numeric versions alphabetically +non_numeric_versions.sort() + +# Combine the sorted lists +versions = non_numeric_versions + numeric_versions + +if 'main' in versions: + versions.insert(0, versions.pop(versions.index('main'))) + +for name in versions: + path = os.path.join(base_dir, name) + if os.path.isdir(path) and name not in ('shared', '.git'): + entries.append({ + 'version': name, + 'url': f'../{name}/' if local else f'/MPAS-Analysis/{name}/' + }) + +os.makedirs(shared_dir, exist_ok=True) +with 
open(os.path.join(shared_dir, 'versions.json'), 'w') as f: + json.dump(entries, f, indent=2) + diff --git a/docs/index.rst b/docs/index.rst index 8f65f7b00..5e6688fe7 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -28,6 +28,11 @@ used those components. :caption: Developer's guide :maxdepth: 2 + developers_guide/quick_start + developers_guide/docs + developers_guide/test_suite + developers_guide/releasing + developers_guide/api design_docs/index diff --git a/docs/shared/version-switcher.js b/docs/shared/version-switcher.js new file mode 100644 index 000000000..17b2d9c8a --- /dev/null +++ b/docs/shared/version-switcher.js @@ -0,0 +1,48 @@ +(async function () { + const container = document.getElementById("version-switcher"); + if (!container) return; + + const metaVersion = document.querySelector('meta[name="doc-version"]'); + const currentVersion = metaVersion ? metaVersion.content : "unknown"; + console.log("Detected current version:", currentVersion); + + async function fetchVersions() { + try { + const scriptUrl = document.currentScript.src; + const basePath = scriptUrl.substring(0, scriptUrl.lastIndexOf('/') + 1); + const versionsUrl = basePath + "versions.json"; + + const res = await fetch(versionsUrl); + if (!res.ok) throw new Error(`Failed to load ${versionsUrl}`); + return await res.json(); + } catch (err) { + console.error("Could not load versions.json:", err); + return []; + } + } + + const versions = await fetchVersions(); + if (!versions.length) return; + + const select = document.createElement("select"); + select.style.marginLeft = "1em"; + select.onchange = () => { + window.location.href = select.value; + }; + + versions.forEach(({ version, url }) => { + const option = document.createElement("option"); + option.value = url; + option.textContent = version; + if (version === currentVersion) { + option.selected = true; + } + select.appendChild(option); + }); + + const label = document.createElement("label"); + label.textContent = "Version: "; + 
label.style.color = "white"; + label.appendChild(select); + container.appendChild(label); +})(); diff --git a/docs/tutorials/dev_add_task.rst b/docs/tutorials/dev_add_task.rst index 0282cc641..4ae1c6f72 100644 --- a/docs/tutorials/dev_add_task.rst +++ b/docs/tutorials/dev_add_task.rst @@ -34,7 +34,7 @@ the code to MPAS-Analysis. If one just wishes to add a new field that already exists in MPAS-Ocean or MPAS-Seaice output, only a few of the steps below are necessary: - 1. Follow step 1 to set up an ```mpas_dev``` environment. + 1. Follow step 1 to set up an ```mpas_analysis_dev``` environment. 2. Copy an existing `ocean `_ or `sea_ice `_ python module to a new name and edit it as needed for the new fields. @@ -58,7 +58,7 @@ testing your new MPAS-Analysis development, and running MPAS-Analysis. Make sure you follow the tutorial for developers, not for users, since the tutorial for users installs the latest release of MPAS-Analysis, which you cannot modify. Similarly, changes must be tested in your own development - environment (often called ``mpas_dev``) rather than the in a shared + environment (often called ``mpas_analysis_dev``) rather than the in a shared environment like `E3SM-Unified `_. Then, please follow the :ref:`tutorial_understand_a_task`. This will give @@ -417,8 +417,8 @@ And here's the one for plotting it: matplotlib.rc('font', size=14) - x = descriptor.xCorner - y = descriptor.yCorner + x = descriptor.x_corner + y = descriptor.y_corner extent = [x[0], x[-1], y[0], y[-1]] @@ -550,12 +550,12 @@ whatever editor you like.) code . -I'll create or recreate my ``mpas_dev`` environment as in +I'll create or recreate my ``mpas_analysis_dev`` environment as in :ref:`tutorial_dev_getting_started`, and then make sure to at least do: .. code-block:: bash - conda activate mpas_dev + conda activate mpas_analysis_dev python -m pip install --no-deps --no-build-isolation -e . 
This last command installs the ``mpas_analysis`` package into the conda @@ -610,13 +610,13 @@ renaming the task are: Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask The task that produced the climatology to be remapped and plotted - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ @@ -997,7 +997,7 @@ script into the ``customize_masked_climatology()`` function: """ logger = self.logger - ds_mesh = xr.open_dataset(self.restartFileName) + ds_mesh = xr.open_dataset(self.meshFilename) ds_mesh = ds_mesh[['cellsOnEdge', 'cellsOnVertex', 'nEdgesOnCell', 'edgesOnCell', 'verticesOnCell', 'verticesOnEdge', 'dcEdge', 'dvEdge']] @@ -1138,12 +1138,12 @@ You also need to add the tasks class and public methods to the in the developer's guide. Again, the easiest approach is to copy the section for a similar task and modify as needed. -With the ``mpas_dev`` environment activated, you can run: +With the ``mpas_analysis_dev`` environment activated, you can run: .. code-block:: bash cd docs - make clean html + DOCS_VERSION=test make clean versioned-html to build the docs locally in the ``_build/html`` subdirectory. 
When generating documentation on HPC machines, you will want to copy the html output to the @@ -1197,13 +1197,13 @@ described in this tutorial: Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask The task that produced the climatology to be remapped and plotted - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ @@ -1313,7 +1313,7 @@ described in this tutorial: """ logger = self.logger - ds_mesh = xr.open_dataset(self.restartFileName) + ds_mesh = xr.open_dataset(self.meshFilename) ds_mesh = ds_mesh[['cellsOnEdge', 'cellsOnVertex', 'nEdgesOnCell', 'edgesOnCell', 'verticesOnCell', 'verticesOnEdge', 'dcEdge', 'dvEdge']] diff --git a/docs/tutorials/dev_getting_started.rst b/docs/tutorials/dev_getting_started.rst index 8ff064578..c7ad2821e 100644 --- a/docs/tutorials/dev_getting_started.rst +++ b/docs/tutorials/dev_getting_started.rst @@ -249,13 +249,13 @@ If you installed Miniforge3, these steps will happen automatically. 4.3 Create a development environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can create a new conda environment called ``mpas_dev`` and install the +You can create a new conda environment called ``mpas_analysis_dev`` and install the dependencies that MPAS-Analysis needs by running the following in the worktree where you are doing your development: .. code-block:: bash - $ conda create -y -n mpas_dev --file dev-spec.txt + $ conda create -y -n mpas_analysis_dev --file dev-spec.txt The last argument is only needed on HPC machines because the conda version of MPI doesn't work properly on these machines. You can omit it if you're @@ -266,21 +266,21 @@ mode by running: .. 
code-block:: bash - $ conda activate mpas_dev + $ conda activate mpas_analysis_dev $ python -m pip install --no-deps --no-build-isolation -e . In this mode, any edits you make to the code in the worktree will be available in the conda environment. If you run ``mpas_analysis`` on the command line, it will know about the changes. -This command only needs to be done once after the ``mpas_dev`` environment is +This command only needs to be done once after the ``mpas_analysis_dev`` environment is built if you are not using worktrees. .. note:: If you do use worktrees, rerun the ``python -m pip install ...`` command each time you switch to developing a new branch, since otherwise the - version of ``mpas_analysis`` in the ``mpas_dev`` environment will be the + version of ``mpas_analysis`` in the ``mpas_analysis_dev`` environment will be the one you were developing previously. .. _tutorial_dev_get_started_activ_env: @@ -288,20 +288,20 @@ built if you are not using worktrees. 4.4 Activating the environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Each time you open a new terminal window, to activate the ``mpas_dev`` +Each time you open a new terminal window, to activate the ``mpas_analysis_dev`` environment, you will need to run either for ``bash``: .. code-block:: bash $ source ~/miniforge3/etc/profile.d/conda.sh - $ conda activate mpas_dev + $ conda activate mpas_analysis_dev or for ``csh``: .. code-block:: csh > source ~/miniforge3/etc/profile.d/conda.csh - > conda activate mpas_dev + > conda activate mpas_analysis_dev You can skip the ``source`` command if you chose to initialize Miniforge3 or Miniconda3 so it loads automatically. You can also use the ``init_conda`` @@ -311,8 +311,8 @@ alias for this step if you defined one. ~~~~~~~~~~~~~~~~~~~~~~~ If you switch to a different worktree, it is safest to rerun the whole -process for creating the ``mpas_dev`` conda environment. 
If you know that -the dependencies are the same as the worktree used to create ``mpas_dev``, +process for creating the ``mpas_analysis_dev`` conda environment. If you know that +the dependencies are the same as the worktree used to create ``mpas_analysis_dev``, You can just reinstall ``mpas_analysis`` itself by rerunning .. code-block:: bash @@ -320,7 +320,7 @@ You can just reinstall ``mpas_analysis`` itself by rerunning python -m pip install --no-deps --no-build-isolation -e . in the new worktree. If you forget this step, you will find that changes you -make in the worktree don't affect the ``mpas_dev`` conda environment you are +make in the worktree don't affect the ``mpas_analysis_dev`` conda environment you are using. 5. Editing code @@ -348,7 +348,7 @@ need to follow steps 2-6 of the :ref:`tutorial_getting_started` tutorial. Run ``mpas_analysis`` on a compute node, not on an HPC login nodes (front ends), because it uses too many resources to be safely run on a login node. - When using a compute node interactively, activate the ``mpas_dev`` + When using a compute node interactively, activate the ``mpas_analysis_dev`` environment, even if it was activated on the login node. Be sure to 7.1 Configuring MPAS-Analysis @@ -688,7 +688,7 @@ also be displayed over the full 5 years.) The hard work is done. Now that we have a config file, we are ready to run. To run MPAS-Analysis, you should either create a job script or log into -an interactive session on a compute node. Then, activate the ``mpas_dev`` +an interactive session on a compute node. Then, activate the ``mpas_analysis_dev`` conda environment as in :ref:`tutorial_dev_get_started_activ_env`. 
On many file systems, MPAS-Analysis and other python-based software that used @@ -724,15 +724,15 @@ Typical output is the analysis is running correctly looks something like: Detected E3SM supported machine: anvil Using the following config files: /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/mpas_analysis/default.cfg - /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/lib/python3.10/site-packages/mache/machines/anvil.cfg + /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_analysis_dev/lib/python3.10/site-packages/mache/machines/anvil.cfg /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/mpas_analysis/configuration/anvil.cfg /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/mpas_analysis/__main__.py /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/myrun.cfg copying /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/myrun.cfg to HTML dir. - running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp76l7of28/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp76l7of28/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_0.5x0.5degree_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --ignore_unmapped - running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpj94wpf9y/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpj94wpf9y/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_6000.0x6000.0km_10.0km_Antarctic_stereo_bilinear.nc --method bilinear 
--netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped - running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp6zm13a0s/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp6zm13a0s/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_WOCE_transects_5km_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped + running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_analysis_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp76l7of28/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp76l7of28/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_0.5x0.5degree_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --ignore_unmapped + running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_analysis_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpj94wpf9y/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpj94wpf9y/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_6000.0x6000.0km_10.0km_Antarctic_stereo_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped + running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_analysis_dev/bin/ESMF_RegridWeightGen --source 
/lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp6zm13a0s/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp6zm13a0s/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_WOCE_transects_5km_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped Preprocessing SOSE transect data... temperature salinity @@ -741,7 +741,7 @@ Typical output is the analysis is running correctly looks something like: meridionalVelocity velMag Done. - running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpe2a9yblb/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpe2a9yblb/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_SOSE_transects_5km_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped + running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_analysis_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpe2a9yblb/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpe2a9yblb/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_SOSE_transects_5km_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped Running tasks: 100% |##########################################| Time: 0:06:42 diff --git a/docs/tutorials/dev_understand_a_task.rst 
b/docs/tutorials/dev_understand_a_task.rst index b27607c04..46a123a5a 100644 --- a/docs/tutorials/dev_understand_a_task.rst +++ b/docs/tutorials/dev_understand_a_task.rst @@ -242,7 +242,7 @@ super class's ``__init__()`` method: Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -252,7 +252,7 @@ super class's ``__init__()`` method: The task that produced the climatology from the first year to be remapped and then subtracted from the main climatology - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ @@ -301,9 +301,8 @@ find something unexpected: raise ValueError(f'config section {section_name} does not contain ' f'valid list of comparison grids') - depth_ranges = config.getexpression('climatologyMapOHCAnomaly', - 'depthRanges', - use_numpyfunc=True) + depth_ranges = config.getnumpy('climatologyMapOHCAnomaly', + 'depthRanges') By default, these config options look like this: @@ -779,8 +778,8 @@ at that before we continue with ``customize_masked_climatology()``. Compute the OHC from the temperature and layer thicknesses in a given climatology data sets. """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) # specific heat [J/(kg*degC)] cp = self.namelist.getfloat('config_specific_heat_sea_water') @@ -789,10 +788,10 @@ at that before we continue with ``customize_masked_climatology()``. 
units_scale_factor = 1e-9 - n_vert_levels = ds_restart.sizes['nVertLevels'] + n_vert_levels = ds_mesh.sizes['nVertLevels'] - z_mid = compute_zmid(ds_restart.bottomDepth, ds_restart.maxLevelCell-1, - ds_restart.layerThickness) + z_mid = compute_zmid(ds_mesh.bottomDepth, ds_mesh.maxLevelCell-1, + ds_mesh.layerThickness) vert_index = xr.DataArray.from_dict( {'dims': ('nVertLevels',), 'data': np.arange(n_vert_levels)}) @@ -800,7 +799,7 @@ at that before we continue with ``customize_masked_climatology()``. temperature = climatology['timeMonthly_avg_activeTracers_temperature'] layer_thickness = climatology['timeMonthly_avg_layerThickness'] - masks = [vert_index < ds_restart.maxLevelCell, + masks = [vert_index < ds_mesh.maxLevelCell, z_mid <= self.min_depth, z_mid >= self.max_depth] for mask in masks: @@ -812,7 +811,7 @@ at that before we continue with ``customize_masked_climatology()``. return ohc This function uses a combination of mesh information taken from an MPAS -restart file (available from the ``self.restartFileName`` attribute inherited +mesh file (available from the ``self.meshFilename`` attribute inherited from :py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask`), namelist options available from the ``self.namelist`` reader (inherited from :py:class:`~mpas_analysis.shared.AnalysisTask`), and ``temperature`` and @@ -904,7 +903,7 @@ here is the full analysis task as described in this tutorial: Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -914,7 +913,7 @@ here is the full analysis task as described in this tutorial: The task that produced the climatology from the first year to be remapped and then subtracted from the main climatology - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ 
@@ -944,9 +943,8 @@ here is the full analysis task as described in this tutorial: raise ValueError(f'config section {section_name} does not contain ' f'valid list of comparison grids') - depth_ranges = config.getexpression('climatologyMapOHCAnomaly', - 'depthRanges', - use_numpyfunc=True) + depth_ranges = config.getnumpy('climatologyMapOHCAnomaly', + 'depthRanges') mpas_field_name = 'deltaOHC' @@ -1160,8 +1158,8 @@ here is the full analysis task as described in this tutorial: Compute the OHC from the temperature and layer thicknesses in a given climatology data sets. """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) # specific heat [J/(kg*degC)] cp = self.namelist.getfloat('config_specific_heat_sea_water') @@ -1170,10 +1168,10 @@ here is the full analysis task as described in this tutorial: units_scale_factor = 1e-9 - n_vert_levels = ds_restart.sizes['nVertLevels'] + n_vert_levels = ds_mesh.sizes['nVertLevels'] - z_mid = compute_zmid(ds_restart.bottomDepth, ds_restart.maxLevelCell-1, - ds_restart.layerThickness) + z_mid = compute_zmid(ds_mesh.bottomDepth, ds_mesh.maxLevelCell-1, + ds_mesh.layerThickness) vert_index = xr.DataArray.from_dict( {'dims': ('nVertLevels',), 'data': np.arange(n_vert_levels)}) @@ -1181,7 +1179,7 @@ here is the full analysis task as described in this tutorial: temperature = climatology['timeMonthly_avg_activeTracers_temperature'] layer_thickness = climatology['timeMonthly_avg_layerThickness'] - masks = [vert_index < ds_restart.maxLevelCell, + masks = [vert_index < ds_mesh.maxLevelCell, z_mid <= self.min_depth, z_mid >= self.max_depth] for mask in masks: diff --git a/docs/users_guide/analysis_tasks.rst b/docs/users_guide/analysis_tasks.rst index d5f2c83c9..0d36779ad 100644 --- a/docs/users_guide/analysis_tasks.rst +++ b/docs/users_guide/analysis_tasks.rst @@ -9,6 +9,7 @@ Analysis Tasks 
tasks/climatologyMapArgoTemperature tasks/climatologyMapBGC tasks/climatologyMapBSF + tasks/climatologyMapCustom tasks/climatologyMapEKE tasks/climatologyMapHeatFluxes tasks/climatologyMapMassFluxes @@ -22,11 +23,12 @@ Analysis Tasks tasks/climatologyMapSSH tasks/climatologyMapVel tasks/climatologyMapWaves + tasks/climatologyMapWindStressCurl tasks/climatologyMapWoa tasks/antshipTransects tasks/conservation - tasks/geojsonTransects + tasks/geojsonNetcdfTransects tasks/hovmollerOceanRegions tasks/indexNino34 tasks/meridionalHeatTransport diff --git a/docs/users_guide/tasks/climatologyMapCustom.rst b/docs/users_guide/tasks/climatologyMapCustom.rst new file mode 100644 index 000000000..98f33cc23 --- /dev/null +++ b/docs/users_guide/tasks/climatologyMapCustom.rst @@ -0,0 +1,260 @@ +.. _task_climatologyMapCustom: + +climatologyMapCustom +==================== + +An analysis task for plotting custom climatologies at various depths. This task +can plot both 2D and 3D variables on cells, the latter with both +``nVertlevels`` -- layer centers -- or ``nVertLevelsP1`` -- layer interfaces +-- as the vertical dimension. The task is designed to be highly coustomizable +via config sections and options, as described below. 
+ +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapCustom] + ## options related to plotting climatology maps of any field at various depths + ## (if they include a depth dimension) without observatons for comparison + + # comparison grid(s) + comparisonGrids = ['latlon'] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # list of depths in meters (positive up) at which to analyze, 'top' for the + # sea surface, 'bot' for the sea floor + depths = ['top', -200, -400, -600, -800, -1000, -1500, -2000, 'bot'] + + # a list of variables available to plot. New variables can be added as long + # as they correspond to a single field already found in MPAS-Ocean's + # timeSeriesStatsMonthly output. Add the 'name', 'title', 'units' (with $$ + # instead a single dollar sign for the config parser), and 'mpas'(the + # timeSeriesStatsMonthly variable name as a single-item list) entries for each + # variable. Then, add a section below climatologyMapCustom with + # the colormap settings for that variable. 
+ availableVariables = { + 'temperature': + {'title': 'Potential Temperature', + 'units': r'$$^\circ$$C', + 'mpas': ['timeMonthly_avg_activeTracers_temperature']}, + 'salinity': + {'title': 'Salinity', + 'units': 'PSU', + 'mpas': ['timeMonthly_avg_activeTracers_salinity']}, + 'potentialDensity': + {'title': 'Potential Density', + 'units': 'kg m$$^{-3}$$', + 'mpas': ['timeMonthly_avg_potentialDensity']}, + 'thermalForcing': + {'title': 'Thermal Forcing', + 'units': r'$$^\circ$$C', + 'mpas': ['timeMonthly_avg_activeTracers_temperature', + 'timeMonthly_avg_activeTracers_salinity', + 'timeMonthly_avg_density', + 'timeMonthly_avg_activeTracers_layerThickness']}, + 'zonalVelocity': + {'title': 'Zonal Velocity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_velocityZonal']}, + 'meridionalVelocity': + {'title': 'Meridional Velocity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_velocityMeridional']}, + 'velocityMagnitude': + {'title': 'Zonal Velocity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_velocityZonal', + 'timeMonthly_avg_velocityMeridional']}, + 'vertVelocity': + {'title': 'Vertical Velocity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_vertVelocityTop']}, + 'vertDiff': + {'title': 'Vertical Diffusivity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_vertDiffTopOfCell']}, + 'vertVisc': + {'title': 'Vertical Viscosity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_vertViscTopOfCell']}, + 'mixedLayerDepth': + {'title': 'Mixed Layer Depth', + 'units': 'm', + 'mpas': ['timeMonthly_avg_dThreshMLD']}, + } + + # a list of fields top plot for each transect. 
All supported fields are listed + # below + variables = [] + + + [climatologyMapCustomTemperature] + ## options related to plotting climatology maps of potential temperature at + ## various levels, including the sea surface and sea floor, possibly against + ### control model results + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -2., 'vmax': 10.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 10., 9) + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -5., 'vmax': 5.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-5., 5., 9) + + [climatologyMapCustomSalinity] + colormapNameResult = haline + colormapTypeResult = continuous + normTypeResult = linear + normArgsResult = {'vmin': 32.2, 'vmax': 35.5} + colormapNameDifference = balance + colormapTypeDifference = continuous + normTypeDifference = linear + normArgsDifference = {'vmin': -1.5, 'vmax': 1.5} + + [climatologyMapCustomPotentialDensity] + colormapNameResult = Spectral_r + colormapTypeResult = continuous + normTypeResult = linear + normArgsResult = {'vmin': 1026.5, 'vmax': 1028.} + colormapNameDifference = balance + colormapTypeDifference = continuous + normTypeDifference = linear + normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} + + [climatologyMapCustomThermalForcing] + colormapNameResult = thermal + colormapTypeResult = continuous + normTypeResult = linear + normArgsResult = {'vmin': -1., 'vmax': 5.} + colormapNameDifference = balance + 
colormapTypeDifference = continuous + normTypeDifference = linear + normArgsDifference = {'vmin': -3., 'vmax': 3.} + + [climatologyMapCustomZonalVelocity] + colormapNameResult = delta + colormapTypeResult = continuous + normTypeResult = linear + normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + colormapNameDifference = balance + colormapTypeDifference = continuous + normTypeDifference = linear + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + + [climatologyMapCustomMeridionalVelocity] + colormapNameResult = delta + colormapTypeResult = continuous + normTypeResult = linear + normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + colormapNameDifference = balance + colormapTypeDifference = continuous + normTypeDifference = linear + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + + [climatologyMapCustomVelocityMagnitude] + colormapNameResult = ice + colormapTypeResult = continuous + normTypeResult = log + normArgsResult = {'vmin': 1.e-3, 'vmax': 1.} + colormapNameDifference = balance + colormapTypeDifference = continuous + normTypeDifference = linear + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + + [climatologyMapCustomVertVelocity] + colormapNameResult = delta + colormapTypeResult = continuous + normTypeResult = linear + normArgsResult = {'vmin': -1e-5, 'vmax': 1e-5} + colormapNameDifference = balance + colormapTypeDifference = continuous + normTypeDifference = linear + normArgsDifference = {'vmin': -1e-5, 'vmax': 1e-5} + + [climatologyMapCustomVertDiff] + colormapNameResult = rain + colormapTypeResult = continuous + normTypeResult = log + normArgsResult = {'vmin': 1e-6, 'vmax': 1.} + colormapNameDifference = balance + colormapTypeDifference = continuous + normTypeDifference = linear + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + + [climatologyMapCustomVertVisc] + colormapNameResult = rain + colormapTypeResult = continuous + normTypeResult = log + normArgsResult = {'vmin': 1e-6, 'vmax': 1.} + colormapNameDifference = balance + colormapTypeDifference = 
continuous + normTypeDifference = linear + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + + [climatologyMapCustomMixedLayerDepth] + colormapNameResult = viridis + colormapTypeResult = continuous + normTypeResult = log + normArgsResult = {'vmin': 10., 'vmax': 300.} + colorbarTicksResult = [10, 20, 40, 60, 80, 100, 200, 300] + colormapNameDifference = balance + colormapTypeDifference = continuous + normTypeDifference = symLog + normArgsDifference = {'linthresh': 10., 'linscale': 0.5, 'vmin': -200., + 'vmax': 200.} + colorbarTicksDifference = [-200., -100., -50., -20., -10., 0., 10., 20., 50., 100., 200.] + + +There is a section for options that apply to all custom climatology maps and +one each for any available variables to plot. + +The option ``availableVariables`` is a dictionary with the names of the +variables available to plot as keys and dictionaries with the title, units, +and MPAS variable name(s) as values. New entries can be added as long as they +correspond to a single field already found in MPAS-Ocean's +``timeSeriesStatsMonthly`` output. For each variable, a section with the name +``climatologyMapCustom`` should be added with the colormap +settings for that variable, see :ref:`config_colormaps` for details. + +The option ``depths`` is a list of (approximate) depths at which to sample +the potential temperature field. A value of ``'top'`` indicates the sea +surface (or the ice-ocean interface under ice shelves) while a value of +``'bot'`` indicates the seafloor. + +By default, no fields are plotted. A user can select which fields to plot by +adding the desired field names to the ``variables`` list. + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Example Result +-------------- + +.. 
image:: examples/clim_map_custom.png + :width: 500 px + :align: center diff --git a/docs/users_guide/tasks/climatologyMapSeaIceConcNH.rst b/docs/users_guide/tasks/climatologyMapSeaIceConcNH.rst index d4516f68d..40b160a39 100644 --- a/docs/users_guide/tasks/climatologyMapSeaIceConcNH.rst +++ b/docs/users_guide/tasks/climatologyMapSeaIceConcNH.rst @@ -18,11 +18,13 @@ The following configuration options are available for this task:: [climatologyMapSeaIceConcNH] ## options related to plotting horizontally remapped climatologies of - ## sea ice concentration against reference model results and observations + ## sea ice concentration against control model results and observations ## in the northern hemisphere (NH) # colormap for model/observations colormapNameResult = ice + # whether the colormap is indexed or continuous + colormapTypeResult = indexed # color indices into colormapName for filled contours colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255] # colormap levels/values for contour boundaries @@ -30,23 +32,19 @@ The following configuration options are available for this task:: # colormap for differences colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed # color indices into colormapName for filled contours - colormapIndicesDifference = [0, 32, 64, 96, 112, 128, 128, 144, 160, 192, - 224, 255] + colormapIndicesDifference = [0, 0, 26, 51, 77, 102, 128, 128, 153, 179, 204, 230, 255, 255] # colormap levels/values for contour boundaries - colorbarLevelsDifference = [-1., -0.8, -0.6, -0.4, -0.2, -0.1, 0, 0.1, 0.2, - 0.4, 0.6, 0.8, 1.] 
+ colorbarLevelsDifference = [-0.5, -0.4, -0.3, -0.2, -0.1, -0.05, 0, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5] # Months or seasons to plot (These should be left unchanged, since # observations are only available for these seasons) seasons = ['JFM', 'JAS'] - # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis - comparisonGrids = ['latlon'] - - # reference lat/lon for sea ice plots in the northern hemisphere - minimumLatitude = 50 - referenceLongitude = 0 + # comparison grid(s) (typically 'arctic_extended') on which to plot analysis + comparisonGrids = ['arctic_extended'] # a list of prefixes describing the sources of the observations to be used observationPrefixes = ['NASATeam', 'Bootstrap'] @@ -54,6 +52,9 @@ The following configuration options are available for this task:: # arrange subplots vertically? vertical = False + # the minimum threshold below which concentration is masked out + minConcentration = 0.15 + # observations files concentrationNASATeamNH_JFM = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_NH_jfm.interp0.5x0.5_20180710.nc concentrationNASATeamNH_JAS = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_NH_jas.interp0.5x0.5_20180710.nc diff --git a/docs/users_guide/tasks/climatologyMapSeaIceConcSH.rst b/docs/users_guide/tasks/climatologyMapSeaIceConcSH.rst index 4ca3219a8..844bc2abf 100644 --- a/docs/users_guide/tasks/climatologyMapSeaIceConcSH.rst +++ b/docs/users_guide/tasks/climatologyMapSeaIceConcSH.rst @@ -18,11 +18,13 @@ The following configuration options are available for this task:: [climatologyMapSeaIceConcSH] ## options related to plotting horizontally remapped climatologies of - ## sea ice concentration against reference model results and observations + ## sea ice concentration against control model results and observations ## in the southern hemisphere (SH) # colormap for model/observations colormapNameResult = ice + # whether the colormap is indexed or continuous + colormapTypeResult = indexed # color 
indices into colormapName for filled contours colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255] # colormap levels/values for contour boundaries @@ -30,23 +32,19 @@ The following configuration options are available for this task:: # colormap for differences colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed # color indices into colormapName for filled contours - colormapIndicesDifference = [0, 32, 64, 96, 112, 128, 128, 144, 160, 192, - 224, 255] + colormapIndicesDifference = [0, 0, 26, 51, 77, 102, 128, 128, 153, 179, 204, 230, 255, 255] # colormap levels/values for contour boundaries - colorbarLevelsDifference = [-1., -0.8, -0.6, -0.4, -0.2, -0.1, 0, 0.1, 0.2, - 0.4, 0.6, 0.8, 1.] + colorbarLevelsDifference = [-0.5, -0.4, -0.3, -0.2, -0.1, -0.05, 0, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5] # Months or seasons to plot (These should be left unchanged, since # observations are only available for these seasons) seasons = ['DJF', 'JJA'] - # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis - comparisonGrids = ['latlon'] - - # reference lat/lon for sea ice plots in the northern hemisphere - minimumLatitude = -50 - referenceLongitude = 180 + # comparison grid(s) (typically 'antarctic_extended') on which to plot analysis + comparisonGrids = ['antarctic_extended'] # a list of prefixes describing the sources of the observations to be used observationPrefixes = ['NASATeam', 'Bootstrap'] @@ -54,6 +52,9 @@ The following configuration options are available for this task:: # arrange subplots vertically? 
vertical = False + # the minimum threshold below which concentration is masked out + minConcentration = 0.15 + # observations files concentrationNASATeamSH_DJF = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_SH_djf.interp0.5x0.5_20180710.nc concentrationNASATeamSH_JJA = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_SH_jja.interp0.5x0.5_20180710.nc diff --git a/docs/users_guide/tasks/climatologyMapSeaIceThickNH.rst b/docs/users_guide/tasks/climatologyMapSeaIceThickNH.rst index dd01c247f..eb74cd057 100644 --- a/docs/users_guide/tasks/climatologyMapSeaIceThickNH.rst +++ b/docs/users_guide/tasks/climatologyMapSeaIceThickNH.rst @@ -19,11 +19,13 @@ The following configuration options are available for this task:: [climatologyMapSeaIceThickNH] ## options related to plotting horizontally remapped climatologies of - ## sea ice thickness against reference model results and observations + ## sea ice thickness against control model results and observations ## in the northern hemisphere (NH) # colormap for model/observations - colormapNameResult = ice + colormapNameResult = davos + # whether the colormap is indexed or continuous + colormapTypeResult = indexed # color indices into colormapName for filled contours colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255] # colormap levels/values for contour boundaries @@ -31,8 +33,10 @@ The following configuration options are available for this task:: # colormap for differences colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed # color indices into colormapName for filled contours - colormapIndicesDifference = [0, 32, 64, 96, 128, 128, 160, 192, 224, 255] + colormapIndicesDifference = [0, 0, 32, 64, 96, 128, 128, 160, 192, 223, 255, 255] # colormap levels/values for contour boundaries colorbarLevelsDifference = [-3., -2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5, 3.] 
@@ -40,12 +44,8 @@ The following configuration options are available for this task:: # observations are only available for these seasons) seasons = ['FM', 'ON'] - # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis - comparisonGrids = ['latlon'] - - # reference lat/lon for sea ice plots in the northern hemisphere - minimumLatitude = 50 - referenceLongitude = 0 + # comparison grid(s) (typically 'arctic_extended') on which to plot analysis + comparisonGrids = ['arctic_extended'] # a list of prefixes describing the sources of the observations to be used observationPrefixes = [''] diff --git a/docs/users_guide/tasks/climatologyMapSeaIceThickSH.rst b/docs/users_guide/tasks/climatologyMapSeaIceThickSH.rst index 883faeb5b..486e6742a 100644 --- a/docs/users_guide/tasks/climatologyMapSeaIceThickSH.rst +++ b/docs/users_guide/tasks/climatologyMapSeaIceThickSH.rst @@ -19,11 +19,13 @@ The following configuration options are available for this task:: [climatologyMapSeaIceThickSH] ## options related to plotting horizontally remapped climatologies of - ## sea ice thickness against reference model results and observations + ## sea ice thickness against control model results and observations ## in the southern hemisphere (SH) # colormap for model/observations - colormapNameResult = ice + colormapNameResult = davos + # whether the colormap is indexed or continuous + colormapTypeResult = indexed # color indices into colormapName for filled contours colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255] # colormap levels/values for contour boundaries @@ -31,8 +33,10 @@ The following configuration options are available for this task:: # colormap for differences colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed # color indices into colormapName for filled contours - colormapIndicesDifference = [0, 32, 64, 96, 128, 128, 160, 192, 224, 255] + colormapIndicesDifference = [0, 0, 32, 64, 96, 128, 
128, 160, 192, 223, 255, 255] # colormap levels/values for contour boundaries colorbarLevelsDifference = [-3., -2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5, 3.] @@ -40,12 +44,8 @@ The following configuration options are available for this task:: # observations are only available for these seasons) seasons = ['FM', 'ON'] - # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis - comparisonGrids = ['latlon'] - - # reference lat/lon for sea ice plots in the northern hemisphere - minimumLatitude = -50 - referenceLongitude = 180 + # comparison grid(s) (typically 'antarctic_extended') on which to plot analysis + comparisonGrids = ['antarctic_extended'] # a list of prefixes describing the sources of the observations to be used observationPrefixes = [''] diff --git a/docs/users_guide/tasks/climatologyMapWindStressCurl.rst b/docs/users_guide/tasks/climatologyMapWindStressCurl.rst new file mode 100644 index 000000000..ac14b36e8 --- /dev/null +++ b/docs/users_guide/tasks/climatologyMapWindStressCurl.rst @@ -0,0 +1,68 @@ +.. _task_climatologyMapWindStressCurl: + +climatologyMapWindStressCurl +============================ + +An analysis task for plotting maps of wind stress curl. 
+ +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, windStressCurl, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapWindStressCurl] + ## options related to plotting horizontally remapped climatologies of + ## wind stress curl against control model results + + # colormap for model/observations + colormapNameResult = cmo.curl + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -1e-6, 'vmax': 1e-6} + + # colormap for differences + colormapNameDifference = cmo.balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -2e-7, 'vmax': 2e-7} + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # comparison grid(s) on which to plot analysis + comparisonGrids = ['latlon'] + +For analysis focused on polar regions (using the ``--polar_regions`` flag), +the following config options add Arctic and Antarctic comparison grids:: + + [climatologyMapWindStressCurl] + ## options related to plotting horizontally remapped climatologies of + ## wind stress curl against control model results + + # comparison grid(s) on which to plot analysis + comparisonGrids = ['latlon', 'arctic_extended', 'antarctic_extended'] + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Example Result +-------------- + +.. 
image:: examples/wind_stress_curl.png + :width: 500 px + :align: center diff --git a/docs/users_guide/tasks/examples/clim_map_custom.png b/docs/users_guide/tasks/examples/clim_map_custom.png new file mode 100644 index 000000000..2eb60063d Binary files /dev/null and b/docs/users_guide/tasks/examples/clim_map_custom.png differ diff --git a/docs/users_guide/tasks/examples/wind_stress_curl.png b/docs/users_guide/tasks/examples/wind_stress_curl.png new file mode 100644 index 000000000..8dd3b9f27 Binary files /dev/null and b/docs/users_guide/tasks/examples/wind_stress_curl.png differ diff --git a/docs/users_guide/tasks/geojsonNetcdfTransects.rst b/docs/users_guide/tasks/geojsonNetcdfTransects.rst new file mode 100644 index 000000000..04604aa8e --- /dev/null +++ b/docs/users_guide/tasks/geojsonNetcdfTransects.rst @@ -0,0 +1,448 @@ +.. _task_geojsonNetcdfTransects: + +geojsonNetcdfTransects +====================== + +An analysis task for interpolating MPAS fields to transects specified by files +in geojson or NetCDF format. + +Component and Tags:: + + component: ocean + tags: climatology, transect, geojson, netcdf + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [geojsonNetcdfTransects] + ## options related to plotting model transects at points determined by a + ## user-specified geojson or NetCDF file. + ## + ## To generate your own geojson file, go to: + ## http://geojson.io/ + ## and draw one or more polylines, then add a name to each: + ## + ## "properties": { + ## "name": "My Favorite Name" + ## }, + ## and save the file as GeoJSON (say transects.geojson). Finally, set the + ## option: + ## geojsonFiles = ['transects.geojson'] + ## (giving an absolute path if necessary) in your custom config file. + ## + ## If you provide a NetCDF file instead, it simply needs to have `lat` and + ## `lon` variables. The `lat` and `lon` variables should be 1D arrays + ## with the same dimension name (e.g. 
'nPoints'). The name of the file + ## (without the base path or extension) will serve as the transect name with + ## underscores converted to spaces. + + # a list of geojson and/or NetCDF files. The geojson files must contain + # lat/lon points in LineStrings to be plotted. The NetCDF files need 'lat' + # and 'lon' variables with the same dimension name. If relative paths are + # given, they are relative to the current working directory. The files must + # be listed in quotes, e.g.: + # geojsonOrNetcdfFiles = ['file1.geojson', '/path/to/file2.geojson', 'file3.nc'] + geojsonOrNetcdfFiles = [] + + # a list of dictionaries for each field to plot. The dictionary includes + # prefix (used for file names, task names and sections) as well as the mpas + # name of the field, units for colorbars and the name as it should appear + # in figure titles and captions. + fields = + [{'prefix': 'temperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$$\degree$$C', + 'titleName': 'Potential Temperature'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$$^{-3}$$', + 'titleName': 'Potential Density'}, + {'prefix': 'zonalVelocity', + 'mpas': 'timeMonthly_avg_velocityZonal', + 'units': r'm s$$^{-1}$$', + 'titleName': 'Zonal Velocity'}, + {'prefix': 'meridionalVelocity', + 'mpas': 'timeMonthly_avg_velocityMeridional', + 'units': r'm s$$^{-1}$$', + 'titleName': 'Meridional Velocity'}, + {'prefix': 'vertVelocity', + 'mpas': 'timeMonthly_avg_vertVelocityTop', + 'units': r'm s$$^{-1}$$', + 'titleName': 'Vertical Velocity'}, + {'prefix': 'vertDiff', + 'mpas': 'timeMonthly_avg_vertDiffTopOfCell', + 'units': r'm s$$^{-1}$$', + 'titleName': 'Vertical Diffusivity'}, + {'prefix': 'vertVisc', + 'mpas': 'timeMonthly_avg_vertViscTopOfCell', + 'units': r'm s$$^{-1}$$', + 'titleName': 'Vertical 
Viscosity'}, + ] + + # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # The approximate horizontal resolution (in km) of each transect. Latitude/ + # longitude between observation points will be subsampled at this interval. + # Use 'obs' to indicate no subsampling. Use 'mpas' to indicate plotting of + # model data on the native grid. + #horizontalResolution = mpas + #horizontalResolution = obs + horizontalResolution = 5 + + # The name of the vertical comparison grid. Valid values are 'mpas' for the + # MPAS vertical grid or any other name if the vertical grid is defined by + # 'verticalComparisonGrid' + #verticalComparisonGridName = mpas + verticalComparisonGridName = uniform_0_to_4000m_at_10m + + # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas'. + # This should be numpy array of (typically negative) elevations (in m). + # The first and last entries are used as axis bounds for 'mpas' vertical + # comparison grids + verticalComparisonGrid = numpy.linspace(0, -4000, 401) + + # A range for the y axis (if any) + verticalBounds = [] + + # The minimum weight of a destination cell after remapping. Any cell with + # weights lower than this threshold will therefore be masked out. 
+ renormalizationThreshold = 0.01 + + + [geojsonNetcdfTemperatureTransects] + ## options related to plotting geojson transects of potential temperature + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -2., 'vmax': 30.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsResult = [] + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -2., 'vmax': 2.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-2., 2., 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsDifference = [] + + + [geojsonNetcdfSalinityTransects] + ## options related to plotting geojson transects of salinity + + # colormap for model/observations + colormapNameResult = haline + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 30, 'vmax': 39.0} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(34.2, 35.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsResult = [] + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + 
colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsDifference = [] + + + [geojsonNetcdfPotentialDensityTransects] + ## options related to plotting geojson transects of potential density + + # colormap for model/observations + colormapNameResult = Spectral_r + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 1026.5, 'vmax': 1028.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(1026., 1028., 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsResult = [] + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsDifference = [] + + + [geojsonNetcdfZonalVelocityTransects] + ## options related to plotting geojson transects of zonal velocity + + # colormap for model/observations + colormapNameResult = delta + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A 
dictionary with keywords for the norm + normArgsResult = {'vmin': -0.05, 'vmax': 0.05} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsResult = 'none' + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.05, 'vmax': 0.05} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsDifference = 'none' + + + [geojsonNetcdfMeridionalVelocityTransects] + ## options related to plotting geojson transects of meridional velocity + + # colormap for model/observations + colormapNameResult = delta + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -0.05, 'vmax': 0.05} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsResult = 'none' + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.05, 'vmax': 0.05} + # determine the ticks automatically by default, 
uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsDifference = 'none' + + [geojsonNetcdfVertVelocityTransects] + ## options related to plotting geojson transects of meridional velocity + + # colormap for model/observations + colormapNameResult = delta + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -1e-5, 'vmax': 1e-5} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsResult = 'none' + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -1e-5, 'vmax': 1e-5} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsDifference = 'none' + + [geojsonNetcdfVertDiffTransects] + ## options related to plotting geojson transects of meridional velocity + + # colormap for model/observations + colormapNameResult = diff + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -0.5, 'vmax': 0.5} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = 
numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsResult = 'none' + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsDifference = 'none' + + [geojsonNetcdfVertViscTransects] + ## options related to plotting geojson transects of meridional velocity + + # colormap for model/observations + colormapNameResult = diff + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -1., 'vmax': 1.} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsResult = 'none' + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -1., 'vmax': 1.} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevelsDifference = 'none' + 
+ +Geojson Files +------------- + +This task takes a list of geojson or NetCDF file names (supplied as a python +list of ``str``):: + + geojsonOrNetcdfFiles = ['file1.geojson', '/path/to/file2.geojson'] + +Geojson transects are specified by ``LineString`` objects in the files. Some +examples are provided in the `MPAS geometric features repository`_. You can +also generate your own very easily: go to +`geojson.io`_ and draw one or more polylines, then add a name to each:: + + ... + "properties": { + "name": "My Favorite Name" + }, + ... + +and save the file as GeoJSON (say transects.geojson). Finally, set the +option:: + + geojsonFiles = ['transects.geojson'] + +(giving an absolute path if necessary) in your custom config file. + + +NetCDF Files +------------ + +As an alternative to (or in addition to) geojson files, you may supply files in +NetCDF format. As before, you provide a list of file names as a python +list of ``str``:: + + geojsonOrNetcdfFiles = ['file1.nc', '/path/to/file2.nc'] + +In this case, the stem of the filename (``file1`` and ``file2`` in the example) +also serves as the name of the transect. The NetCDF files must contain +``lat`` and ``lon`` variables. These variables should be 1D arrays with the +same dimension name (e.g. ``nPoints``). + +Fields +------ + +Since there are no observations associated with geojson transects, you are +free to choose which MPAS fields you would like to plot. These fields are +provided as a python dictionary. The keys are names for the fields (anything +you would like to use as a prefix on files and subtask names, best if it does +not contain spaces). The values are python dictionaries. The values +associated with the ``mpas`` key are the names of the 3D fields where transects +are desired. The ``units`` entry indicates the units to display on the +colorbar. The ``titleName`` entry specifies the name of the field to include +in plot titles and captions. 
+ +Each field must have a corresponding section in the config file defining its +color maps. For example, ``temperature`` has an associated +``[geojsonNetcdfTemperatureTransects]`` section. + +Other Options +------------- + +For details on the remaining configuration options, see: + * :ref:`config_transects` + * :ref:`config_remapping` + * :ref:`config_colormaps` + * :ref:`config_seasons` + +Example Result +-------------- + +.. image:: examples/geojson_transect.png + :width: 500 px + :align: center + +.. _`MPAS geometric features repository`: https://github.com/MPAS-Dev/geometric_features +.. _`geojson.io`: http://geojson.io/ diff --git a/docs/users_guide/tasks/geojsonTransects.rst b/docs/users_guide/tasks/geojsonTransects.rst deleted file mode 100644 index da611e93c..000000000 --- a/docs/users_guide/tasks/geojsonTransects.rst +++ /dev/null @@ -1,261 +0,0 @@ -.. _task_geojsonTransects: - -geojsonTransects -================ - -An analysis task for interpolating MPAS fields to transects specified by files -in ``geojson`` format.. - -Component and Tags:: - - component: ocean - tags: climatology, transect, geojson - -Configuration Options ---------------------- - -The following configuration options are available for this task:: - - [geojsonTransects] - ## options related to plotting model transects at points determined by a - ## geojson file. To generate your own geojson file, go to: - ## http://geojson.io/ - ## and draw one or more polylines, then add a name to each: - ## - ## "properties": { - ## "name": "My Favorite Name" - ## }, - ## and save the file as GeoJSON (say transects.geojson). Finally, set the - ## option: - ## geojsonFiles = ['transects.geojson'] - ## (giving an absolute path if necessary) in your custom config file. - - # a list of geojson files containing lat/lon points in LineStrings to be - # plotted. If relative paths are given, they are relative to the current - # working directory. 
The files must be listed in quotes, e.g.: - # geojsonFiles = ['file1.geojson', '/path/to/file2.geojson'] - geojsonFiles = [] - - # a list of dictionaries for each field to plot. The dictionary includes - # prefix (used for file names, task names and sections) as well as the mpas - # name of the field, units for colorbars and a the name as it should appear - # in figure titles and captions. - fields = - [{'prefix': 'temperature', - 'mpas': 'timeMonthly_avg_activeTracers_temperature', - 'units': r'$\degree$C', - 'titleName': 'Potential Temperature'}, - {'prefix': 'salinity', - 'mpas': 'timeMonthly_avg_activeTracers_salinity', - 'units': r'PSU', - 'titleName': 'Salinity'}, - {'prefix': 'potentialDensity', - 'mpas': 'timeMonthly_avg_potentialDensity', - 'units': r'kg m$^{-3}$', - 'titleName': 'Potential Density'}, - {'prefix': 'zonalVelocity', - 'mpas': 'timeMonthly_avg_velocityZonal', - 'units': r'm s$^{-1}$', - 'titleName': 'Zonal Velocity'}, - {'prefix': 'meridionalVelocity', - 'mpas': 'timeMonthly_avg_velocityMeridional', - 'units': r'm s$^{-1}$', - 'titleName': 'Meridional Velocity'}] - - # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, - # Nov, Dec, JFM, AMJ, JAS, OND, ANN) - seasons = ['ANN'] - - # The approximate horizontal resolution (in km) of each transect. Latitude/ - # longitude between observation points will be subsampled at this interval. - # Use 'obs' to indicate no subsampling. - horizontalResolution = 5 - - # The name of the vertical comparison grid. Valid values are 'mpas' for the - # MPAS vertical grid or any other name if the vertical grid is defined by - # 'verticalComparisonGrid' - #verticalComparisonGridName = mpas - verticalComparisonGridName = uniform_0_to_4000m_at_10m - - # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas'. - # This should be numpy array of (typically negative) elevations (in m). 
- verticalComparisonGrid = numpy.linspace(0, -4000, 401) - - # The minimum weight of a destination cell after remapping. Any cell with - # weights lower than this threshold will therefore be masked out. - renormalizationThreshold = 0.01 - - - [geojsonTemperatureTransects] - ## options related to plotting geojson transects of potential temperature - - # colormap for model/observations - colormapNameResult = RdYlBu_r - # the type of norm used in the colormap - normTypeResult = linear - # A dictionary with keywords for the SemiLogNorm - normArgsResult = {'vmin': -2., 'vmax': 30.} - # place the ticks automatically by default - # colorbarTicksResult = numpy.linspace(-2., 2., 9) - - # colormap for differences - colormapNameDifference = balance - # the type of norm used in the colormap - normTypeDifference = linear - # A dictionary with keywords for the SemiLogNorm - normArgsDifference = {'vmin': -2., 'vmax': 2.} - # place the ticks automatically by default - # colorbarTicksDifference = numpy.linspace(-2., 2., 9) - - - [geojsonSalinityTransects] - ## options related to plotting geojson transects of salinity - - # colormap for model/observations - colormapNameResult = haline - # the type of norm used in the colormap - normTypeResult = linear - # A dictionary with keywords for the SemiLogNorm - normArgsResult = {'vmin': 30, 'vmax': 39.0} - # place the ticks automatically by default - # colorbarTicksResult = numpy.linspace(34.2, 35.2, 9) - - # colormap for differences - colormapNameDifference = balance - # the type of norm used in the colormap - normTypeDifference = linear - # A dictionary with keywords for the SemiLogNorm - normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} - # place the ticks automatically by default - # colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9) - - - [geojsonPotentialDensityTransects] - ## options related to plotting geojson transects of potential density - - # colormap for model/observations - colormapNameResult = Spectral_r - # the type of 
norm used in the colormap - normTypeResult = linear - # A dictionary with keywords for the norm - normArgsResult = {'vmin': 1026.5, 'vmax': 1028.} - # place the ticks automatically by default - # colorbarTicksResult = numpy.linspace(1026., 1028., 9) - - # colormap for differences - colormapNameDifference = balance - # the type of norm used in the colormap - normTypeDifference = linear - # A dictionary with keywords for the norm - normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} - # place the ticks automatically by default - # colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9) - - - [geojsonZonalVelocityTransects] - ## options related to plotting geojson transects of zonal velocity - - # colormap for model/observations - colormapNameResult = delta - # color indices into colormapName for filled contours - # the type of norm used in the colormap - normTypeResult = linear - # A dictionary with keywords for the norm - normArgsResult = {'vmin': -0.2, 'vmax': 0.2} - # determine the ticks automatically by default, uncomment to specify - # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) - - # colormap for differences - colormapNameDifference = balance - # the type of norm used in the colormap - normTypeDifference = linear - # A dictionary with keywords for the norm - normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} - # determine the ticks automatically by default, uncomment to specify - # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) - - - [geojsonMeridionalVelocityTransects] - ## options related to plotting geojson transects of meridional velocity - - # colormap for model/observations - colormapNameResult = delta - # color indices into colormapName for filled contours - # the type of norm used in the colormap - normTypeResult = linear - # A dictionary with keywords for the norm - normArgsResult = {'vmin': -0.2, 'vmax': 0.2} - # determine the ticks automatically by default, uncomment to specify - # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) - - # 
colormap for differences - colormapNameDifference = balance - # the type of norm used in the colormap - normTypeDifference = linear - # A dictionary with keywords for the norm - normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} - # determine the ticks automatically by default, uncomment to specify - # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) - -Geojson Files -------------- - -This task takes a list of geojson file names (supplied as a python list of -``str``:: - - geojsonFiles = ['file1.geojson', '/path/to/file2.geojson'] - -Transects are specified by ``LineString`` objects in the files. Some examples -are provided in the `MPAS geometric features repository`_. You can also -generate your own very easily at To generate your own geojson file, go to -`geojson.io`_ and draw one or more polylines, then add a name to each:: - - ... - "properties": { - "name": "My Favorite Name" - }, - ... - -and save the file as GeoJSON (say transects.geojson). Finally, set the -option:: - - geojsonFiles = ['transects.geojson'] - -(giving an absolute path if necessary) in your custom config file. - - -Fields ------- - -Since there are no observations associated with geojson transects, you are -free to choose which MPAS fields you would like to plot. These fields are -provided as a python dictionary. The keys are names for the fields (anything -you would like use as a prefix on files and subtask names, best if it does -not contain spaces). The values are python dictionaries. The values -associate with the ``mpas`` key are the names of the 3D fields where transects -are desired. The ``units`` entry indicates the units to display on the -colorbar. The ``titleName`` entry specifies the name of the field to include -in plot titles and captions. - -Each field must have a corresponding section in the config file defining its -color maps. For example, ``temperature`` has an associated -``[geojsonTemperatureTransect]`` section. 
- -Other Options -------------- - -For details on the remaining configuration options, see: - * :ref:`config_transects` - * :ref:`config_remapping` - * :ref:`config_colormaps` - * :ref:`config_seasons` - -Example Result --------------- - -.. image:: examples/geojson_transect.png - :width: 500 px - :align: center - -.. _`MPAS geometric features repository`: https://github.com/MPAS-Dev/geometric_features -.. _`geojson.io`: http://geojson.io/ diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index 4bbb7c88b..cab22ea39 100644 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -37,7 +37,7 @@ from mache import discover_machine, MachineInfo -from mpas_tools.config import MpasConfigParser +from tranche import Tranche from mpas_analysis.shared.analysis_task import AnalysisFormatter @@ -70,7 +70,7 @@ def update_time_bounds_in_config(config): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche contains config options """ @@ -88,10 +88,10 @@ def build_analysis_list(config, controlConfig): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche contains config options - controlConfig : mpas_tools.config.MpasConfigParser or None + controlConfig : tranche.Tranche or None contains config options for a control run, or ``None`` if no config file for a control run was specified @@ -186,6 +186,14 @@ def build_analysis_list(config, controlConfig): config, oceanClimatologyTasks['avg'], oceanRegionMasksTask, controlConfig)) + analyses.append(ocean.ClimatologyMapCustom( + config, oceanClimatologyTasks['avg'], controlConfig)) + + + analyses.append(ocean.ClimatologyMapWindStressCurl( + config, oceanClimatologyTasks['avg'], controlConfig) + ) + analyses.append(ocean.ConservationTask( config, controlConfig)) @@ -241,9 +249,9 @@ def build_analysis_list(config, controlConfig): analyses.append(ocean.WoaTransects(config, oceanClimatologyTasks['avg'], controlConfig)) - 
analyses.append(ocean.GeojsonTransects(config, - oceanClimatologyTasks['avg'], - controlConfig)) + analyses.append(ocean.GeojsonNetcdfTransects(config, + oceanClimatologyTasks['avg'], + controlConfig)) oceanRegionalProfiles = ocean.OceanRegionalProfiles( config, oceanRegionMasksTask, controlConfig) @@ -580,7 +588,7 @@ def update_generate(config, generate): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche contains config options generate : str @@ -607,7 +615,7 @@ def run_analysis(config, analyses): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche contains config options analyses : OrderedDict of ``AnalysisTask`` objects @@ -861,7 +869,7 @@ def build_config(user_config_file, shared_configs, machine_info): if not os.path.exists(user_config_file): raise OSError(f'A config file {user_config_file} was specified but ' f'the file does not exist') - config = MpasConfigParser() + config = Tranche() for config_file in shared_configs: if config_file.endswith('.py'): # we'll skip config options set in python files @@ -1045,7 +1053,7 @@ def main(): parser.print_help() sys.exit(0) - config = MpasConfigParser() + config = Tranche() # add default.cfg to cover default not included in the config files # provided on the command line diff --git a/mpas_analysis/analysis_task_template.py b/mpas_analysis/analysis_task_template.py index 503046c94..710e189d3 100644 --- a/mpas_analysis/analysis_task_template.py +++ b/mpas_analysis/analysis_task_template.py @@ -84,7 +84,7 @@ class MyTask(AnalysisTask): # python class start with the argument self, which is not included in # the list of arguments when you call a method of an object (because it # is always included automatically). 
- # config is an mpas_tools.config.MpasConfigParser object that can be used + # config is an tranche.Tranche object that can be used # to get configuration options stored in default.cfg or a custom config # file specific to a given simulation. See examples below or in # existing analysis tasks. @@ -103,7 +103,7 @@ def __init__(self, config, prerequsiteTask, myArg='myDefaultValue'): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options prerequsiteTask : ``AnotherTaskClass`` diff --git a/mpas_analysis/default.cfg b/mpas_analysis/default.cfg index 3fcd11d33..00cb1882d 100755 --- a/mpas_analysis/default.cfg +++ b/mpas_analysis/default.cfg @@ -527,6 +527,13 @@ lonLines = np.arange(-180., 181., 30.) generate = True +[ocean] +## options related to ocean analysis + +# the name of the stream that points to the MPAS mesh file. +meshStream = mesh + + [oceanObservations] ## options related to ocean observations with which the results will be ## compared @@ -585,6 +592,14 @@ remappedClimSubdirectory = clim/obs/remapped baseDirectory = /dir/to/ocean/reference +[seaIce] +## options related to sea-ice analysis + +# the name of the stream that points to the MPAS mesh file. The "mesh" stream +# in MPAS-seaice points to a dummy file, so it is not useful for this purpose. 
+meshStream = landIceMasks + + [seaIceObservations] ## options related to sea ice observations with which the results will be ## compared @@ -1691,6 +1706,37 @@ makeTables = False iceShelvesInTable = [] +[climatologyMapWindStressCurl] +## options related to plotting horizontally remapped climatologies of +## wind stress curl against control model results + +# colormap for model/observations +colormapNameResult = cmo.curl +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# color indices into colormapName for filled contours +# the type of norm used in the colormap +normTypeResult = linear +# A dictionary with keywords for the norm +normArgsResult = {'vmin': -1e-6, 'vmax': 1e-6} + +# colormap for differences +colormapNameDifference = cmo.balance +# whether the colormap is indexed or continuous +colormapTypeDifference = continuous +# the type of norm used in the colormap +normTypeDifference = linear +# A dictionary with keywords for the norm +normArgsDifference = {'vmin': -2e-7, 'vmax': 2e-7} + +# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, +# Nov, Dec, JFM, AMJ, JAS, OND, ANN) +seasons = ['ANN'] + +# comparison grid(s) on which to plot analysis +comparisonGrids = ['latlon'] + + [timeSeriesAntarcticMelt] ## options related to plotting time series of melt below Antarctic ice shelves @@ -2151,6 +2197,215 @@ normTypeDifference = linear normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} +[climatologyMapCustom] +## options related to plotting climatology maps of any field at various depths +## (if they include a depth dimension) without observatons for comparison + +# comparison grid(s) +comparisonGrids = ['latlon'] + +# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, +# Nov, Dec, JFM, AMJ, JAS, OND, ANN) +seasons = ['ANN'] + +# list of depths in meters (positive up) at which to analyze, 'top' for the +# sea surface, 'bot' for the sea floor +depths = ['top', -200, -400, -600, -800, 
-1000, -1500, -2000, 'bot'] + +# a list of variables available to plot. New variables can be added as long +# as they correspond to a single field already found in MPAS-Ocean's +# timeSeriesStatsMonthly output. Add the 'name', 'title', 'units' (with $$ +# instead a single dollar sign for the config parser), and 'mpas'(the +# timeSeriesStatsMonthly variable name as a single-item list) entries for each +# variable. Then, add a section below climatologyMapCustom with +# the colormap settings for that variable. +availableVariables = { + 'temperature': + {'title': 'Potential Temperature', + 'units': r'$$^\circ$$C', + 'mpas': ['timeMonthly_avg_activeTracers_temperature']}, + 'salinity': + {'title': 'Salinity', + 'units': 'PSU', + 'mpas': ['timeMonthly_avg_activeTracers_salinity']}, + 'potentialDensity': + {'title': 'Potential Density', + 'units': 'kg m$$^{-3}$$', + 'mpas': ['timeMonthly_avg_potentialDensity']}, + 'thermalForcing': + {'title': 'Thermal Forcing', + 'units': r'$$^\circ$$C', + 'mpas': ['timeMonthly_avg_activeTracers_temperature', + 'timeMonthly_avg_activeTracers_salinity', + 'timeMonthly_avg_density', + 'timeMonthly_avg_layerThickness']}, + 'zonalVelocity': + {'title': 'Zonal Velocity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_velocityZonal']}, + 'meridionalVelocity': + {'title': 'Meridional Velocity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_velocityMeridional']}, + 'velocityMagnitude': + {'title': 'Velocity Magnitude', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_velocityZonal', + 'timeMonthly_avg_velocityMeridional']}, + 'vertVelocity': + {'title': 'Vertical Velocity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_vertVelocityTop']}, + 'vertDiff': + {'title': 'Vertical Diffusivity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_vertDiffTopOfCell']}, + 'vertVisc': + {'title': 'Vertical Viscosity', + 'units': r'm s$$^{-1}$$', + 'mpas': ['timeMonthly_avg_vertViscTopOfCell']}, + 'mixedLayerDepth': + 
{'title': 'Mixed Layer Depth', + 'units': 'm', + 'mpas': ['timeMonthly_avg_dThreshMLD'], + 'has_depth': False}, + } + +# a list of fields top plot for each depth slice. All supported fields are +# listed above +variables = [] + + +[climatologyMapCustomTemperature] +## options related to plotting climatology maps of potential temperature at +## various levels, including the sea surface and sea floor, possibly against +### control model results + +# colormap for model/observations +colormapNameResult = RdYlBu_r +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# the type of norm used in the colormap +normTypeResult = linear +# A dictionary with keywords for the norm +normArgsResult = {'vmin': -2., 'vmax': 10.} +# place the ticks automatically by default +# colorbarTicksResult = numpy.linspace(-2., 10., 9) + +# colormap for differences +colormapNameDifference = balance +# whether the colormap is indexed or continuous +colormapTypeDifference = continuous +# the type of norm used in the colormap +normTypeDifference = linear +# A dictionary with keywords for the norm +normArgsDifference = {'vmin': -5., 'vmax': 5.} +# place the ticks automatically by default +# colorbarTicksDifference = numpy.linspace(-5., 5., 9) + +[climatologyMapCustomSalinity] +colormapNameResult = haline +colormapTypeResult = continuous +normTypeResult = linear +normArgsResult = {'vmin': 32.2, 'vmax': 35.5} +colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = linear +normArgsDifference = {'vmin': -1.5, 'vmax': 1.5} + +[climatologyMapCustomPotentialDensity] +colormapNameResult = Spectral_r +colormapTypeResult = continuous +normTypeResult = linear +normArgsResult = {'vmin': 1026.5, 'vmax': 1028.} +colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = linear +normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} + +[climatologyMapCustomThermalForcing] +colormapNameResult = thermal +colormapTypeResult 
= continuous +normTypeResult = linear +normArgsResult = {'vmin': -1., 'vmax': 5.} +colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = linear +normArgsDifference = {'vmin': -3., 'vmax': 3.} + +[climatologyMapCustomZonalVelocity] +colormapNameResult = delta +colormapTypeResult = continuous +normTypeResult = linear +normArgsResult = {'vmin': -0.2, 'vmax': 0.2} +colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = linear +normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + +[climatologyMapCustomMeridionalVelocity] +colormapNameResult = delta +colormapTypeResult = continuous +normTypeResult = linear +normArgsResult = {'vmin': -0.2, 'vmax': 0.2} +colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = linear +normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + +[climatologyMapCustomVelocityMagnitude] +colormapNameResult = ice +colormapTypeResult = continuous +normTypeResult = log +normArgsResult = {'vmin': 1.e-3, 'vmax': 1.} +colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = linear +normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + +[climatologyMapCustomVertVelocity] +colormapNameResult = delta +colormapTypeResult = continuous +normTypeResult = linear +normArgsResult = {'vmin': -1e-5, 'vmax': 1e-5} +colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = linear +normArgsDifference = {'vmin': -1e-5, 'vmax': 1e-5} + +[climatologyMapCustomVertDiff] +colormapNameResult = rain +colormapTypeResult = continuous +normTypeResult = log +normArgsResult = {'vmin': 1e-6, 'vmax': 1.} +colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = linear +normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + +[climatologyMapCustomVertVisc] +colormapNameResult = rain +colormapTypeResult = continuous +normTypeResult = log +normArgsResult = {'vmin': 1e-6, 'vmax': 1.} 
+colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = linear +normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + +[climatologyMapCustomMixedLayerDepth] +colormapNameResult = viridis +colormapTypeResult = continuous +normTypeResult = log +normArgsResult = {'vmin': 10., 'vmax': 300.} +colorbarTicksResult = [10, 20, 40, 60, 80, 100, 200, 300] +colormapNameDifference = balance +colormapTypeDifference = continuous +normTypeDifference = symLog +normArgsDifference = {'linthresh': 10., 'linscale': 0.5, 'vmin': -200., + 'vmax': 200.} +colorbarTicksDifference = [-200., -100., -50., -20., -10., 0., 10., 20., 50., 100., 200.] + [climatologyMapSose] ## options related to plotting climatology maps of Antarctic fields at various ## levels, including the sea floor against control model results and SOSE @@ -3407,9 +3662,11 @@ normArgsDifference = {'vmin': -1.0, 'vmax': 1.0} contourLevelsDifference = np.arange(-0.9, 1.0, 0.4) -[geojsonTransects] +[geojsonNetcdfTransects] ## options related to plotting model transects at points determined by a -## geojson file. To generate your own geojson file, go to: +## user-specified geojson or NetCDF file. +## +## To generate your own geojson file, go to: ## http://geojson.io/ ## and draw one or more polylines, then add a name to each: ## @@ -3420,18 +3677,26 @@ contourLevelsDifference = np.arange(-0.9, 1.0, 0.4) ## option: ## geojsonFiles = ['transects.geojson'] ## (giving an absolute path if necessary) in your custom config file. - -# a list of geojson files containing lat/lon points in LineStrings to be -# plotted. If relative paths are given, they are relative to the current -# working directory. The files must be listed in quotes, e.g.: -# geojsonFiles = ['file1.geojson', '/path/to/file2.geojson'] -geojsonFiles = [] - -# a list of dictionaries for each field to plot. 
The dictionary includes -# prefix (used for file names, task names and sections) as well as the mpas -# name of the field, units for colorbars and a the name as it should appear -# in figure titles and captions. -fields = +## +## If you provide a NetCDF file instead, it simply needs to have 'lat` and +## `lon` variables. The `lat` and `lon` variables should be 1D arrays +## with the same dimension name (e.g. 'nPoints'). The name of the file +## (without the base path or extension) will serve as the transect name with +## underscores converted to spaces. + +# a list of geojson and/or NetCDF files. The geojson files must contain +# lat/lon points in LineStrings to be plotted. The NetCDF files need 'lat' +# and 'lon' variables with the same dimesion name. If relative paths are +# given, they are relative to the current working directory. The files must +# be listed in quotes, e.g.: +# geojsonOrNetcdfFiles = ['file1.geojson', '/path/to/file2.geojson', 'file3.nc'] +geojsonOrNetcdfFiles = [] + +# a list of dictionaries for each field available to plot. The dictionary +# includes prefix (used for file names, task names and sections) as well as the +# mpas name of the field, units for colorbars and a the name as it should +# appear in figure titles and captions. 
+availableVariables = [{'prefix': 'temperature', 'mpas': 'timeMonthly_avg_activeTracers_temperature', 'units': r'$$\degree$$C', @@ -3451,7 +3716,25 @@ fields = {'prefix': 'meridionalVelocity', 'mpas': 'timeMonthly_avg_velocityMeridional', 'units': r'm s$$^{-1}$$', - 'titleName': 'Meridional Velocity'}] + 'titleName': 'Meridional Velocity'}, + {'prefix': 'vertVelocity', + 'mpas': 'timeMonthly_avg_vertVelocityTop', + 'units': r'm s$$^{-1}$$', + 'titleName': 'Vertical Velocity'}, + {'prefix': 'vertDiff', + 'mpas': 'timeMonthly_avg_vertDiffTopOfCell', + 'units': r'm s$$^{-1}$$', + 'titleName': 'Vertical Diffusivity'}, + {'prefix': 'vertVisc', + 'mpas': 'timeMonthly_avg_vertViscTopOfCell', + 'units': r'm s$$^{-1}$$', + 'titleName': 'Vertical Viscosity'}, + ] + +# a list of the prefixes from availableVariables that should be plotted +variables = ['temperature', 'salinity', 'potentialDensity', + 'zonalVelocity', 'meridionalVelocity', 'vertVelocity', + 'vertDiff', 'vertVisc'] # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, # Nov, Dec, JFM, AMJ, JAS, OND, ANN) @@ -3485,7 +3768,7 @@ verticalBounds = [] renormalizationThreshold = 0.01 -[geojsonTemperatureTransects] +[geojsonNetcdfTemperatureTransects] ## options related to plotting geojson transects of potential temperature # colormap for model/observations @@ -3517,7 +3800,7 @@ normArgsDifference = {'vmin': -2., 'vmax': 2.} contourLevelsDifference = [] -[geojsonSalinityTransects] +[geojsonNetcdfSalinityTransects] ## options related to plotting geojson transects of salinity # colormap for model/observations @@ -3549,7 +3832,7 @@ normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} contourLevelsDifference = [] -[geojsonPotentialDensityTransects] +[geojsonNetcdfPotentialDensityTransects] ## options related to plotting geojson transects of potential density # colormap for model/observations @@ -3581,7 +3864,7 @@ normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} contourLevelsDifference = [] 
-[geojsonZonalVelocityTransects] +[geojsonNetcdfZonalVelocityTransects] ## options related to plotting geojson transects of zonal velocity # colormap for model/observations @@ -3591,12 +3874,12 @@ colormapTypeResult = continuous # the type of norm used in the colormap normTypeResult = linear # A dictionary with keywords for the norm -normArgsResult = {'vmin': -0.2, 'vmax': 0.2} +normArgsResult = {'vmin': -0.05, 'vmax': 0.05} # determine the ticks automatically by default, uncomment to specify # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) # contour line levels (use [] for automatic contour selection, 'none' for no # contour lines) -contourLevelsResult = [] +contourLevelsResult = 'none' # colormap for differences colormapNameDifference = balance @@ -3605,15 +3888,15 @@ colormapTypeDifference = continuous # the type of norm used in the colormap normTypeDifference = linear # A dictionary with keywords for the norm -normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} +normArgsDifference = {'vmin': -0.05, 'vmax': 0.05} # determine the ticks automatically by default, uncomment to specify # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) # contour line levels (use [] for automatic contour selection, 'none' for no # contour lines) -contourLevelsDifference = [] +contourLevelsDifference = 'none' -[geojsonMeridionalVelocityTransects] +[geojsonNetcdfMeridionalVelocityTransects] ## options related to plotting geojson transects of meridional velocity # colormap for model/observations @@ -3623,12 +3906,12 @@ colormapTypeResult = continuous # the type of norm used in the colormap normTypeResult = linear # A dictionary with keywords for the norm -normArgsResult = {'vmin': -0.2, 'vmax': 0.2} +normArgsResult = {'vmin': -0.05, 'vmax': 0.05} # determine the ticks automatically by default, uncomment to specify # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) # contour line levels (use [] for automatic contour selection, 'none' for no # contour lines) -contourLevelsResult = 
[] +contourLevelsResult = 'none' # colormap for differences colormapNameDifference = balance @@ -3637,12 +3920,105 @@ colormapTypeDifference = continuous # the type of norm used in the colormap normTypeDifference = linear # A dictionary with keywords for the norm -normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} +normArgsDifference = {'vmin': -0.05, 'vmax': 0.05} # determine the ticks automatically by default, uncomment to specify # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) # contour line levels (use [] for automatic contour selection, 'none' for no # contour lines) -contourLevelsDifference = [] +contourLevelsDifference = 'none' + +[geojsonNetcdfVertVelocityTransects] +## options related to plotting geojson transects of vertical velocity + +# colormap for model/observations +colormapNameResult = delta +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# the type of norm used in the colormap +normTypeResult = linear +# A dictionary with keywords for the norm +normArgsResult = {'vmin': -1e-5, 'vmax': 1e-5} +# determine the ticks automatically by default, uncomment to specify +# colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) +# contour line levels (use [] for automatic contour selection, 'none' for no +# contour lines) +contourLevelsResult = 'none' + +# colormap for differences +colormapNameDifference = balance +# whether the colormap is indexed or continuous +colormapTypeDifference = continuous +# the type of norm used in the colormap +normTypeDifference = linear +# A dictionary with keywords for the norm +normArgsDifference = {'vmin': -1e-5, 'vmax': 1e-5} +# determine the ticks automatically by default, uncomment to specify +# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) +# contour line levels (use [] for automatic contour selection, 'none' for no +# contour lines) +contourLevelsDifference = 'none' + +[geojsonNetcdfVertDiffTransects] +## options related to plotting geojson transects of vertical diffusivity 
+ +# colormap for model/observations +colormapNameResult = rain +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# the type of norm used in the colormap +normTypeResult = log +# A dictionary with keywords for the norm +normArgsResult = {'vmin': 1e-6, 'vmax': 1.} +# determine the ticks automatically by default, uncomment to specify +# colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) +# contour line levels (use [] for automatic contour selection, 'none' for no +# contour lines) +contourLevelsResult = 'none' + +# colormap for differences +colormapNameDifference = balance +# whether the colormap is indexed or continuous +colormapTypeDifference = continuous +# the type of norm used in the colormap +normTypeDifference = linear +# A dictionary with keywords for the norm +normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} +# determine the ticks automatically by default, uncomment to specify +# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) +# contour line levels (use [] for automatic contour selection, 'none' for no +# contour lines) +contourLevelsDifference = 'none' + +[geojsonNetcdfVertViscTransects] +## options related to plotting geojson transects of vertical viscosity + +# colormap for model/observations +colormapNameResult = rain +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# the type of norm used in the colormap +normTypeResult = log +# A dictionary with keywords for the norm +normArgsResult = {'vmin': 1e-6, 'vmax': 1.} +# determine the ticks automatically by default, uncomment to specify +# colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) +# contour line levels (use [] for automatic contour selection, 'none' for no +# contour lines) +contourLevelsResult = 'none' + +# colormap for differences +colormapNameDifference = balance +# whether the colormap is indexed or continuous +colormapTypeDifference = continuous +# the type of norm used in the colormap +normTypeDifference = linear +# A
dictionary with keywords for the norm +normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} +# determine the ticks automatically by default, uncomment to specify +# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) +# contour line levels (use [] for automatic contour selection, 'none' for no +# contour lines) +contourLevelsDifference = 'none' [soseTransects] @@ -4502,7 +4878,7 @@ colormapNameDifference = balance # whether the colormap is indexed or continuous colormapTypeDifference = indexed # color indices into colormapName for filled contours -colormapIndicesDifference = [0, 32, 64, 96, 112, 128, 128, 144, 160, 192, 224, 255] +colormapIndicesDifference = [0, 0, 26, 51, 77, 102, 128, 128, 153, 179, 204, 230, 255, 255] # colormap levels/values for contour boundaries colorbarLevelsDifference = [-0.5, -0.4, -0.3, -0.2, -0.1, -0.05, 0, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5] @@ -4548,7 +4924,7 @@ colormapNameDifference = balance # whether the colormap is indexed or continuous colormapTypeDifference = indexed # color indices into colormapName for filled contours -colormapIndicesDifference = [0, 32, 64, 96, 112, 128, 128, 144, 160, 192, 224, 255] +colormapIndicesDifference = [0, 0, 26, 51, 77, 102, 128, 128, 153, 179, 204, 230, 255, 255] # colormap levels/values for contour boundaries colorbarLevelsDifference = [-0.5, -0.4, -0.3, -0.2, -0.1, -0.05, 0, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5] @@ -4594,7 +4970,7 @@ colormapNameDifference = balance # whether the colormap is indexed or continuous colormapTypeDifference = indexed # color indices into colormapName for filled contours -colormapIndicesDifference = [0, 32, 64, 96, 128, 128, 160, 192, 224, 255] +colormapIndicesDifference = [0, 0, 32, 64, 96, 128, 128, 160, 192, 223, 255, 255] # colormap levels/values for contour boundaries colorbarLevelsDifference = [-3., -2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5, 3.] 
@@ -4635,7 +5011,7 @@ colormapNameDifference = balance # whether the colormap is indexed or continuous colormapTypeDifference = indexed # color indices into colormapName for filled contours -colormapIndicesDifference = [0, 32, 64, 96, 128, 128, 160, 192, 224, 255] +colormapIndicesDifference = [0, 0, 32, 64, 96, 128, 128, 160, 192, 223, 255, 255] # colormap levels/values for contour boundaries colorbarLevelsDifference = [-3., -2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5, 3.] diff --git a/mpas_analysis/download_data.py b/mpas_analysis/download_data.py index 3c70aeea6..ce850ef9d 100755 --- a/mpas_analysis/download_data.py +++ b/mpas_analysis/download_data.py @@ -19,7 +19,7 @@ import argparse -import pkg_resources +import importlib.resources as resources import os from mpas_analysis.shared.io.download import download_files @@ -49,9 +49,10 @@ def download_analysis_data(): pass urlBase = 'https://web.lcrc.anl.gov/public/e3sm/diagnostics' - analysisFileList = pkg_resources.resource_string( - 'mpas_analysis', - 'obs/{}_input_files'.format(args.dataset)).decode('utf-8') + resource = resources.files('mpas_analysis.obs').joinpath( + f'{args.dataset}_input_files') + with resource.open('r') as f: + analysisFileList = f.read() # remove any empty strings from the list analysisFileList = list(filter(None, analysisFileList.split('\n'))) diff --git a/mpas_analysis/ocean/__init__.py b/mpas_analysis/ocean/__init__.py index 107d0785d..cdf68106a 100644 --- a/mpas_analysis/ocean/__init__.py +++ b/mpas_analysis/ocean/__init__.py @@ -25,6 +25,14 @@ from mpas_analysis.ocean.climatology_map_argo import \ ClimatologyMapArgoTemperature, ClimatologyMapArgoSalinity +from mpas_analysis.ocean.climatology_map_custom import ( + ClimatologyMapCustom +) + +from mpas_analysis.ocean.climatology_map_wind_stress_curl import ( + ClimatologyMapWindStressCurl +) + from mpas_analysis.ocean.conservation import ConservationTask from mpas_analysis.ocean.time_series_temperature_anomaly import \ @@ -57,7 +65,9 @@ 
from mpas_analysis.ocean.osnap_transects import OsnapTransects from mpas_analysis.ocean.sose_transects import SoseTransects from mpas_analysis.ocean.woa_transects import WoaTransects -from mpas_analysis.ocean.geojson_transects import GeojsonTransects +from mpas_analysis.ocean.geojson_netcdf_transects import ( + GeojsonNetcdfTransects +) from mpas_analysis.ocean.ocean_regional_profiles import \ OceanRegionalProfiles diff --git a/mpas_analysis/ocean/antship_transects.py b/mpas_analysis/ocean/antship_transects.py index d301761cb..eb7f519c5 100644 --- a/mpas_analysis/ocean/antship_transects.py +++ b/mpas_analysis/ocean/antship_transects.py @@ -34,14 +34,14 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted as a transect - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -68,8 +68,8 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): if verticalComparisonGridName in ['mpas', 'obs']: verticalComparisonGrid = None else: - verticalComparisonGrid = config.getexpression( - sectionName, 'verticalComparisonGrid', use_numpyfunc=True) + verticalComparisonGrid = config.getnumpy( + sectionName, 'verticalComparisonGrid') verticalBounds = config.getexpression(sectionName, 'verticalBounds') diff --git a/mpas_analysis/ocean/climatology_map_antarctic_melt.py b/mpas_analysis/ocean/climatology_map_antarctic_melt.py index 34e55bb3f..32fd0acb7 100644 --- a/mpas_analysis/ocean/climatology_map_antarctic_melt.py +++ b/mpas_analysis/ocean/climatology_map_antarctic_melt.py @@ -50,7 +50,7 @@ def __init__(self, config, mpasClimatologyTask, regionMasksTask, Parameters ---------- - config : 
mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` @@ -59,7 +59,7 @@ def __init__(self, config, mpasClimatologyTask, regionMasksTask, regionMasksTask : ``ComputeRegionMasks`` A task for computing region masks - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche Configuration options for a control run """ # Authors @@ -333,8 +333,8 @@ def run_task(self): # ------- # Xylar Asay-Davis - # first, load the land-ice mask from the restart file - dsLandIceMask = xr.open_dataset(self.restartFileName) + # first, load the land-ice mask from the mesh file + dsLandIceMask = xr.open_dataset(self.meshFilename) dsLandIceMask = dsLandIceMask[['landIceMask']] dsLandIceMask = dsLandIceMask.isel(Time=0) self.landIceMask = dsLandIceMask.landIceMask > 0. @@ -407,7 +407,7 @@ def get_observation_descriptor(self, fileName): # stereographic coordinates projection = get_pyproj_projection(comparison_grid_name='antarctic') obsDescriptor = ProjectionGridDescriptor.read( - projection, fileName=fileName, xVarName='x', yVarName='y') + projection, filename=fileName, x_var_name='x', y_var_name='y') # update the mesh name to match the format used elsewhere in # MPAS-Analysis @@ -416,7 +416,7 @@ def get_observation_descriptor(self, fileName): width = 1e-3 * (x[-1] - x[0]) height = 1e-3 * (y[-1] - y[0]) res = 1e-3 * (x[1] - x[0]) - obsDescriptor.meshName = f'{width}x{height}km_{res}km_Antarctic_stereo' + obsDescriptor.mesh_name = f'{width}x{height}km_{res}km_Antarctic_stereo' return obsDescriptor @@ -460,7 +460,7 @@ def __init__(self, parentTask, mpasClimatologyTask, controlConfig, mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche Configuration options for a control run (if any) regionMasksTask : ``ComputeRegionMasks`` @@ -555,12 +555,11 @@ def 
run_task(self): cellMasks = \ dsRegionMask.regionCellMasks.chunk({'nRegions': 10}) - restartFileName = \ - self.runStreams.readpath('restart')[0] + meshFilename = self.get_mesh_filename() - dsRestart = xr.open_dataset(restartFileName) - landIceFraction = dsRestart.landIceFraction.isel(Time=0) - areaCell = dsRestart.areaCell + dsMesh = xr.open_dataset(meshFilename) + landIceFraction = dsMesh.landIceFraction.isel(Time=0) + areaCell = dsMesh.areaCell # convert from kg/s to kg/yr totalMeltFlux = constants.sec_per_year * \ diff --git a/mpas_analysis/ocean/climatology_map_argo.py b/mpas_analysis/ocean/climatology_map_argo.py index 5349b5262..aa92f98b9 100644 --- a/mpas_analysis/ocean/climatology_map_argo.py +++ b/mpas_analysis/ocean/climatology_map_argo.py @@ -49,13 +49,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -189,13 +189,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -394,8 +394,8 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using Lat/Lon # coordinates obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, - latVarName='latCoord', - lonVarName='lonCoord') + lat_var_name='latCoord', + lon_var_name='lonCoord') 
dsObs.close() return obsDescriptor diff --git a/mpas_analysis/ocean/climatology_map_bgc.py b/mpas_analysis/ocean/climatology_map_bgc.py index 2d2c81023..7703e3a73 100644 --- a/mpas_analysis/ocean/climatology_map_bgc.py +++ b/mpas_analysis/ocean/climatology_map_bgc.py @@ -37,13 +37,13 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) Authors @@ -346,8 +346,8 @@ def get_observation_descriptor(self, fileName): # coordinates dsObs = self.build_observational_dataset(fileName) obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, - latVarName='lat', - lonVarName='lon') + lat_var_name='lat', + lon_var_name='lon') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/ocean/climatology_map_bsf.py b/mpas_analysis/ocean/climatology_map_bsf.py index c30a6f119..4dac10212 100644 --- a/mpas_analysis/ocean/climatology_map_bsf.py +++ b/mpas_analysis/ocean/climatology_map_bsf.py @@ -8,15 +8,18 @@ # Additional copyright and license information can be found in the LICENSE file # distributed with this code, or at # https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE +import os + import xarray as xr -import numpy as np -import scipy.sparse -import scipy.sparse.linalg + +from mpas_tools.ocean import ( + compute_barotropic_streamfunction, + shift_barotropic_streamfunction +) from mpas_analysis.shared import AnalysisTask from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask from mpas_analysis.shared.plot import PlotClimatologyMapSubtask -from mpas_analysis.ocean.utility import compute_zmid from 
mpas_analysis.shared.projection import comparison_grid_option_suffixes @@ -37,13 +40,13 @@ def __init__(self, config, mpas_climatology_task, control_config=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask The task that produced the climatology to be remapped and plotted - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # noqa: E501 field_name = 'barotropicStreamfunction' @@ -59,9 +62,7 @@ def __init__(self, config, mpas_climatology_task, control_config=None): # read in what seasons we want to plot seasons = config.getexpression(section_name, 'seasons') - depth_ranges = config.getexpression(section_name, - 'depthRanges', - use_numpyfunc=True) + depth_ranges = config.getnumpy(section_name, 'depthRanges') if len(seasons) == 0: raise ValueError(f'config section {section_name} does not contain ' f'valid list of seasons') @@ -213,6 +214,11 @@ def __init__(self, mpas_climatology_task, parent_task, variable_list, seasons, comparison_grid_names, subtaskName=subtask_name, vertices=True) + # this requires a lot of memory so let's reserve all the available + # tasks + parallelTaskCount = self.config.getint('execute', 'parallelTaskCount') + self.subprocessCount = parallelTaskCount + self.min_depth = min_depth self.max_depth = max_depth self.include_bolus = None @@ -253,6 +259,39 @@ def setup_and_check(self): # Add the variables and seasons, now that we have the variable list self.mpasClimatologyTask.add_variables(self.variableList, self.seasons) + def run_task(self): + """ + Compute the requested climatologies + """ + config = self.config + # check if climatology exists and if all comparison grids are present + for season in self.seasons: + masked_climatology_filename = self.get_masked_file_name(season) + if not 
os.path.exists(masked_climatology_filename): + continue + all_found = True + with xr.open_dataset(masked_climatology_filename) as ds: + for comparison_grid_name in self.comparisonDescriptors.keys(): + grid_suffix = \ + comparison_grid_option_suffixes[comparison_grid_name] + config_section_name = f'{self.taskName}{grid_suffix}' + if config.has_section(config_section_name): + mpas_field_name = \ + f'barotropicStreamfunction{grid_suffix}' + if mpas_field_name not in ds: + all_found = False + break + if not all_found: + # if not, remove the files and recompute/remap + os.remove(masked_climatology_filename) + for comparison_grid_name in self.comparisonDescriptors.keys(): + remapped_filename = self.get_remapped_file_name( + season, comparison_grid_name) + if os.path.exists(remapped_filename): + os.remove(remapped_filename) + + super().run_task() + def customize_masked_climatology(self, climatology, season): """ Compute the masked climatology from the normal velocity @@ -274,14 +313,56 @@ def customize_masked_climatology(self, climatology, season): logger = self.logger config = self.config - ds_mesh = xr.open_dataset(self.restartFileName) - ds_mesh = ds_mesh[['cellsOnEdge', 'cellsOnVertex', 'nEdgesOnCell', - 'edgesOnCell', 'verticesOnCell', 'verticesOnEdge', - 'edgesOnVertex', 'dcEdge', 'dvEdge', 'bottomDepth', - 'maxLevelCell', 'latVertex', 'areaTriangle',]] - ds_mesh.load() - bsf_vertex = self._compute_barotropic_streamfunction_vertex( - ds_mesh, climatology) + ds_mesh = xr.open_dataset(self.meshFilename) + var_list = [ + 'cellsOnEdge', + 'cellsOnVertex', + 'nEdgesOnCell', + 'edgesOnCell', + 'verticesOnCell', + 'verticesOnEdge', + 'edgesOnVertex', + 'dcEdge', + 'dvEdge', + 'bottomDepth', + 'minLevelCell', + 'maxLevelCell', + 'latVertex', + 'areaTriangle', + ] + if 'minLevelCell' not in ds_mesh: + # some older meshes don't have this one + ds_mesh['minLevelCell'] = xr.ones_like(ds_mesh.maxLevelCell) + + ds_mesh = ds_mesh[var_list].as_numpy() + + masked_filename = 
self.get_masked_file_name(season) + masked_dir = os.path.dirname(masked_filename) + + cells_on_vertex = ds_mesh.cellsOnVertex - 1 + lat_vertex = ds_mesh.latVertex + bsf_vertex = compute_barotropic_streamfunction( + ds_mesh=ds_mesh, + ds=climatology, + min_depth=self.min_depth, + max_depth=self.max_depth, + include_bolus=self.include_bolus, + include_submesoscale=self.include_submesoscale, + logger=logger, + tmp_dir=masked_dir, + ) + + lat_range = config.getexpression( + self.taskName, 'latitudeRangeForZeroBSF') + + bsf_vertex = shift_barotropic_streamfunction( + bsf_vertex=bsf_vertex, + lat_range=lat_range, + cells_on_vertex=cells_on_vertex, + lat_vertex=lat_vertex, + logger=logger, + ) + logger.info('bsf on vertices computed.') climatology['barotropicStreamfunction'] = bsf_vertex @@ -305,234 +386,16 @@ def customize_masked_climatology(self, climatology, season): lat_range = config.getexpression( config_section_name, 'latitudeRangeForZeroBSF') - climatology[mpas_field_name] = _shift_bsf( - bsf_vertex, lat_range, ds_mesh.cellsOnVertex - 1, - ds_mesh.latVertex) + climatology[mpas_field_name] = shift_barotropic_streamfunction( + bsf_vertex=bsf_vertex, + lat_range=lat_range, + cells_on_vertex=cells_on_vertex, + lat_vertex=lat_vertex, + logger=logger, + ) climatology[mpas_field_name].attrs['units'] = 'Sv' climatology[mpas_field_name].attrs['description'] = \ f'barotropic streamfunction at vertices, offset for ' \ f'{grid_suffix} plots' return climatology - - def _compute_vert_integ_velocity(self, ds_mesh, ds): - - cells_on_edge = ds_mesh.cellsOnEdge - 1 - inner_edges = np.logical_and(cells_on_edge.isel(TWO=0) >= 0, - cells_on_edge.isel(TWO=1) >= 0) - - # convert from boolean mask to indices - inner_edges = np.flatnonzero(inner_edges.values) - - cell0 = cells_on_edge.isel(nEdges=inner_edges, TWO=0) - cell1 = cells_on_edge.isel(nEdges=inner_edges, TWO=1) - n_vert_levels = ds.sizes['nVertLevels'] - - layer_thickness = ds.timeMonthly_avg_layerThickness - max_level_cell = 
ds_mesh.maxLevelCell - 1 - - vert_index = xr.DataArray.from_dict( - {'dims': ('nVertLevels',), 'data': np.arange(n_vert_levels)}) - z_mid = compute_zmid(ds_mesh.bottomDepth, max_level_cell, - layer_thickness) - z_mid_edge = 0.5*(z_mid.isel(nCells=cell0) + - z_mid.isel(nCells=cell1)) - - normal_velocity = ds.timeMonthly_avg_normalVelocity - if self.include_bolus: - normal_velocity += ds.timeMonthly_avg_normalGMBolusVelocity - if self.include_submesoscale: - normal_velocity += ds.timeMonthly_avg_normalMLEvelocity - normal_velocity = normal_velocity.isel(nEdges=inner_edges) - - layer_thickness_edge = 0.5*(layer_thickness.isel(nCells=cell0) + - layer_thickness.isel(nCells=cell1)) - mask_bottom = (vert_index <= max_level_cell).T - mask_bottom_edge = np.logical_and(mask_bottom.isel(nCells=cell0), - mask_bottom.isel(nCells=cell1)) - masks = [mask_bottom_edge, - z_mid_edge <= self.min_depth, - z_mid_edge >= self.max_depth] - for mask in masks: - normal_velocity = normal_velocity.where(mask) - layer_thickness_edge = layer_thickness_edge.where(mask) - - vert_integ_velocity = np.zeros(ds_mesh.dims['nEdges'], dtype=float) - inner_vert_integ_vel = ( - (layer_thickness_edge * normal_velocity).sum(dim='nVertLevels')) - vert_integ_velocity[inner_edges] = inner_vert_integ_vel.values - - vert_integ_velocity = xr.DataArray(vert_integ_velocity, - dims=('nEdges',)) - - return vert_integ_velocity - - def _compute_edge_sign_on_vertex(self, ds_mesh): - edges_on_vertex = ds_mesh.edgesOnVertex - 1 - vertices_on_edge = ds_mesh.verticesOnEdge - 1 - - nvertices = ds_mesh.sizes['nVertices'] - vertex_degree = ds_mesh.sizes['vertexDegree'] - - edge_sign_on_vertex = np.zeros((nvertices, vertex_degree), dtype=int) - vertices = np.arange(nvertices) - for iedge in range(vertex_degree): - eov = edges_on_vertex.isel(vertexDegree=iedge) - valid_edge = eov >= 0 - - v0_on_edge = vertices_on_edge.isel(nEdges=eov, TWO=0) - v1_on_edge = vertices_on_edge.isel(nEdges=eov, TWO=1) - valid_edge = 
np.logical_and(valid_edge, v0_on_edge >= 0) - valid_edge = np.logical_and(valid_edge, v1_on_edge >= 0) - - mask = np.logical_and(valid_edge, v0_on_edge == vertices) - edge_sign_on_vertex[mask, iedge] = -1 - - mask = np.logical_and(valid_edge, v1_on_edge == vertices) - edge_sign_on_vertex[mask, iedge] = 1 - - return edge_sign_on_vertex - - def _compute_vert_integ_vorticity(self, ds_mesh, vert_integ_velocity, - edge_sign_on_vertex): - - area_vertex = ds_mesh.areaTriangle - dc_edge = ds_mesh.dcEdge - edges_on_vertex = ds_mesh.edgesOnVertex - 1 - - vertex_degree = ds_mesh.sizes['vertexDegree'] - - vert_integ_vorticity = xr.zeros_like(ds_mesh.latVertex) - for iedge in range(vertex_degree): - eov = edges_on_vertex.isel(vertexDegree=iedge) - edge_sign = edge_sign_on_vertex[:, iedge] - dc = dc_edge.isel(nEdges=eov) - vert_integ_vel = vert_integ_velocity.isel(nEdges=eov) - vert_integ_vorticity += ( - dc / area_vertex * edge_sign * vert_integ_vel) - - return vert_integ_vorticity - - def _compute_barotropic_streamfunction_vertex(self, ds_mesh, ds): - edge_sign_on_vertex = self._compute_edge_sign_on_vertex(ds_mesh) - vert_integ_velocity = self._compute_vert_integ_velocity(ds_mesh, ds) - vert_integ_vorticity = self._compute_vert_integ_vorticity( - ds_mesh, vert_integ_velocity, edge_sign_on_vertex) - self.logger.info('vertically integrated vorticity computed.') - - config = self.config - lat_range = config.getexpression( - 'climatologyMapBSF', 'latitudeRangeForZeroBSF') - - nvertices = ds_mesh.sizes['nVertices'] - vertex_degree = ds_mesh.sizes['vertexDegree'] - - cells_on_vertex = ds_mesh.cellsOnVertex - 1 - edges_on_vertex = ds_mesh.edgesOnVertex - 1 - vertices_on_edge = ds_mesh.verticesOnEdge - 1 - area_vertex = ds_mesh.areaTriangle - dc_edge = ds_mesh.dcEdge - dv_edge = ds_mesh.dvEdge - - # one equation involving vertex degree + 1 vertices for each vertex - # plus 2 entries for the boundary condition and Lagrange multiplier - ndata = (vertex_degree + 1) * nvertices + 2 - 
indices = np.zeros((2, ndata), dtype=int) - data = np.zeros(ndata, dtype=float) - - # the laplacian on the dual mesh of the streamfunction is the - # vertically integrated vorticity - vertices = np.arange(nvertices, dtype=int) - idata = (vertex_degree + 1) * vertices + 1 - indices[0, idata] = vertices - indices[1, idata] = vertices - for iedge in range(vertex_degree): - eov = edges_on_vertex.isel(vertexDegree=iedge) - dc = dc_edge.isel(nEdges=eov) - dv = dv_edge.isel(nEdges=eov) - - v0 = vertices_on_edge.isel(nEdges=eov, TWO=0) - v1 = vertices_on_edge.isel(nEdges=eov, TWO=1) - - edge_sign = edge_sign_on_vertex[:, iedge] - - mask = v0 == vertices - # the difference is v1 - v0, so we want to subtract this vertex - # when it is v0 and add it when it is v1 - this_vert_sign = np.where(mask, -1., 1.) - # the other vertex is obviously whichever one this is not - other_vert_index = np.where(mask, v1, v0) - # if there are invalid vertices, we need to make sure we don't - # index out of bounds. The edge_sign will mask these out - other_vert_index = np.where(other_vert_index >= 0, - other_vert_index, 0) - - idata_other = idata + iedge + 1 - - indices[0, idata] = vertices - indices[1, idata] = vertices - indices[0, idata_other] = vertices - indices[1, idata_other] = other_vert_index - - this_data = this_vert_sign * edge_sign * dc / (dv * area_vertex) - data[idata] += this_data - data[idata_other] = -this_data - - # Now, the boundary condition: To begin with, we set the BSF at the - # frist vertext to zero - indices[0, -2] = nvertices - indices[1, -2] = 0 - data[-2] = 1. - - # The same in the final column - indices[0, -1] = 0 - indices[1, -1] = nvertices - data[-1] = 1. 
- - # one extra spot for the Lagrange multiplier - rhs = np.zeros(nvertices + 1, dtype=float) - - rhs[0:-1] = vert_integ_vorticity.values - - matrix = scipy.sparse.csr_matrix( - (data, indices), - shape=(nvertices + 1, nvertices + 1)) - - solution = scipy.sparse.linalg.spsolve(matrix, rhs) - - # drop the Lagrange multiplier and convert to Sv with the desired sign - # convention - bsf_vertex = xr.DataArray(-1e-6 * solution[0:-1], - dims=('nVertices',)) - - bsf_vertex = _shift_bsf(bsf_vertex, lat_range, cells_on_vertex, - ds_mesh.latVertex) - - return bsf_vertex - - -def _shift_bsf(bsf_vertex, lat_range, cells_on_vertex, lat_vertex): - """ - Shift the barotropic streamfunction to be zero at the boundary over - the given latitude range - """ - is_boundary_cov = cells_on_vertex == -1 - boundary_vertices = is_boundary_cov.sum(dim='vertexDegree') > 0 - - boundary_vertices = np.logical_and( - boundary_vertices, - lat_vertex >= np.deg2rad(lat_range[0]) - ) - boundary_vertices = np.logical_and( - boundary_vertices, - lat_vertex <= np.deg2rad(lat_range[1]) - ) - - # convert from boolean mask to indices - boundary_vertices = np.flatnonzero(boundary_vertices.values) - - mean_boundary_bsf = bsf_vertex.isel(nVertices=boundary_vertices).mean() - - bsf_shifted = bsf_vertex - mean_boundary_bsf - - return bsf_shifted diff --git a/mpas_analysis/ocean/climatology_map_custom.py b/mpas_analysis/ocean/climatology_map_custom.py new file mode 100644 index 000000000..328144aa8 --- /dev/null +++ b/mpas_analysis/ocean/climatology_map_custom.py @@ -0,0 +1,326 @@ +# This software is open source software available under the BSD-3 license. +# +# Copyright (c) 2022 Triad National Security, LLC. All rights reserved. +# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights +# reserved. +# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. 
+# +# Additional copyright and license information can be found in the LICENSE file +# distributed with this code, or at +# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE + +import numpy as np +import xarray as xr +from mpas_tools.cime.constants import constants as cime_constants + +from mpas_analysis.ocean.remap_depth_slices_subtask import ( + RemapDepthSlicesSubtask +) +from mpas_analysis.shared import AnalysisTask +from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask +from mpas_analysis.shared.plot import PlotClimatologyMapSubtask + + +class ClimatologyMapCustom(AnalysisTask): + """ + A flexible analysis task for plotting climatologies of any MPAS-Ocean field + on cells from timeSeriesStatsMonthly at various depths (if the field has + vertical levels) and for various seasons. + + Various derived fields are also supported: + + * velocity magnitude + * thermal forcing (temperature - freezing temperature) + """ + + def __init__(self, config, mpasClimatologyTask, controlConfig=None): + """ + Construct the analysis task. 
+ + Parameters + ---------- + config : tranche.Tranche + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : tranche.Tranche, optional + Configuration options for a control run (if any) + """ + + taskName = 'climatologyMapCustom' + + sectionName = taskName + variablesNames = config.getexpression(sectionName, 'variables') + + tags = ['climatology', 'horizontalMap'] + variablesNames + + # call the constructor from the base class (AnalysisTask) + super().__init__( + config=config, taskName=taskName, componentName='ocean', tags=tags) + + if len(variablesNames) == 0: + return + + variablesNames = config.getexpression(sectionName, 'variables') + if len(variablesNames) == 0: + return + + availableVariables = config.getexpression(sectionName, + 'availableVariables') + + variables = {varName: availableVariables[varName] for varName in + variablesNames} + + for varName in variablesNames: + if 'has_depth' not in variables[varName]: + # we assume variables have depth unless otherwise specified + variables[varName]['has_depth'] = True + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {sectionName} does not contain ' + 'valid list of seasons') + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError(f'config section {sectionName} does not contain ' + 'valid list of comparison grids') + + depths = config.getexpression(sectionName, 'depths') + + if len(depths) == 0: + raise ValueError(f'config section {sectionName} does not ' + f'contain valid list of depths') + + remapMpasSubtask = RemapMpasDerivedVariableClimatology( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='custom3D', + variables=variables, + seasons=seasons, + depths=depths, + 
comparisonGridNames=comparisonGridNames) + + galleryGroup = 'Custom Climatology Maps' + groupLink = 'custclimmaps' + + for varName, metadata in variables.items(): + title = metadata['title'] + units = metadata['units'] + hasDepth = metadata['has_depth'] + upperVarName = varName[0].upper() + varName[1:] + varSectionName = f'{self.taskName}{upperVarName}' + + remapObsSubtask = None + + refTitleLabel = None + diffTitleLabel = None + if controlConfig is not None: + controlRunName = controlConfig.get('runs', 'mainRunName') + refTitleLabel = f'Control: {controlRunName}' + diffTitleLabel = 'Main - Control' + + if hasDepth: + localDepths = depths + else: + localDepths = [None] + + for comparisonGridName in comparisonGridNames: + for depth in localDepths: + for season in seasons: + + subtaskName = f'plot{upperVarName}_{season}_' \ + f'{comparisonGridName}' + if depth is not None: + subtaskName = f'{subtaskName}_depth_{depth}' + + subtask = PlotClimatologyMapSubtask( + parentTask=self, + season=season, + comparisonGridName=comparisonGridName, + remapMpasClimatologySubtask=remapMpasSubtask, + remapObsClimatologySubtask=remapObsSubtask, + controlConfig=controlConfig, + depth=depth, + subtaskName=subtaskName) + + subtask.set_plot_info( + outFileLabel=f'cust_{varName}', + fieldNameInTitle=title, + mpasFieldName=varName, + refFieldName=varName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=units, + imageCaption=title, + galleryGroup=galleryGroup, + groupSubtitle=None, + groupLink=groupLink, + galleryName=title, + configSectionName=varSectionName) + + self.add_subtask(subtask) + + +class RemapMpasDerivedVariableClimatology(RemapDepthSlicesSubtask): + """ + A subtask for computing derived variables (such as velocity magnitude and + thermal forcing) as part of remapping climatologies at depth slices + + Attributes + ---------- + variables : dict of dict + A dictionary of variable definitions, with variable names as keys + + """ + + def __init__(self, 
mpasClimatologyTask, parentTask, climatologyName, + variables, seasons, depths, comparisonGridNames): + + """ + Construct the analysis task and adds it as a subtask of the + ``parentTask``. + + Parameters + ---------- + mpasClimatologyTask : MpasClimatologyTask + The task that produced the climatology to be remapped + + parentTask : AnalysisTask + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + climatologyName : str + A name that describes the climatology (e.g. a short version of + the important field(s) in the climatology) used to name the + subdirectories for each stage of the climatology + + variables : dict of dict + A dictionary of variable definitions, with variable names as keys + + seasons : list of str, optional + A list of seasons (keys in ``shared.constants.monthDictionary``) + to be computed or ['none'] (not ``None``) if only monthly + climatologies are needed. + + depths : list of {None, float, 'top', 'bot'} + A list of depths at which the climatology will be sliced in the + vertical. + + comparisonGridNames : list of {'latlon', 'antarctic'}, optional + The name(s) of the comparison grid to use for remapping. 
+ """ + self.variables = variables + + mpasVariables = set() + for variable in variables.values(): + for mpasVariable in variable['mpas']: + mpasVariables.add(mpasVariable) + + # call the constructor from the base class + # (RemapMpasClimatologySubtask) + super().__init__( + mpasClimatologyTask, parentTask, climatologyName, mpasVariables, + seasons, depths, comparisonGridNames, iselValues=None) + + def customize_masked_climatology(self, climatology, season): + """ + Construct velocity magnitude as part of the climatology + + Parameters + ---------- + climatology : ``xarray.Dataset`` object + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : ``xarray.Dataset`` object + the modified climatology data set + """ + + # first, compute the derived variables, which may rely on having the + # full 3D variables available + + derivedVars = [] + self._add_vel_mag(climatology, derivedVars) + self._add_thermal_forcing(climatology, derivedVars) + + # then, call the superclass's version of this function so we extract + # the desired slices (but before renaming because it expects the + # original MPAS variable names) + climatology = super().customize_masked_climatology(climatology, + season) + # finally, rename the variables and add metadata + for varName, variable in self.variables.items(): + if varName not in derivedVars: + # rename variables from MPAS names to shorter names + mpasvarNames = variable['mpas'] + if len(mpasvarNames) == 1: + mpasvarName = mpasvarNames[0] + climatology[varName] = climatology[mpasvarName] + climatology.drop_vars(mpasvarName) + + climatology[varName].attrs['units'] = variable['units'] + climatology[varName].attrs['description'] = variable['title'] + + return climatology + + def _add_vel_mag(self, climatology, derivedVars): + """ + Add the velocity magnitude to the climatology if requested + """ + varName = 'velocityMagnitude' + if varName not in self.variables: + return + + 
derivedVars.append(varName) + + zonalVel = climatology.timeMonthly_avg_velocityZonal + meridVel = climatology.timeMonthly_avg_velocityMeridional + climatology[varName] = np.sqrt(zonalVel**2 + meridVel**2) + + def _add_thermal_forcing(self, climatology, derivedVars): + """ + Add thermal forcing to the climatology if requested + """ + varName = 'thermalForcing' + if varName not in self.variables: + return + + derivedVars.append(varName) + + c0 = self.namelist.getfloat( + 'config_land_ice_cavity_freezing_temperature_coeff_0') + cs = self.namelist.getfloat( + 'config_land_ice_cavity_freezing_temperature_coeff_S') + cp = self.namelist.getfloat( + 'config_land_ice_cavity_freezing_temperature_coeff_p') + cps = self.namelist.getfloat( + 'config_land_ice_cavity_freezing_temperature_coeff_pS') + + temp = climatology.timeMonthly_avg_activeTracers_temperature + salin = climatology.timeMonthly_avg_activeTracers_salinity + dens = climatology.timeMonthly_avg_density + thick = climatology.timeMonthly_avg_layerThickness + + dp = cime_constants['SHR_CONST_G']*dens*thick + press = dp.cumsum(dim='nVertLevels') - 0.5*dp + + # add land ice pressure if available + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) + if 'landIcePressure' in ds_mesh: + press += ds_mesh.landIcePressure + + tempFreeze = c0 + cs*salin + cp*press + cps*press*salin + + climatology[varName] = temp - tempFreeze diff --git a/mpas_analysis/ocean/climatology_map_eke.py b/mpas_analysis/ocean/climatology_map_eke.py index cea9c38e0..cd6d70ca3 100644 --- a/mpas_analysis/ocean/climatology_map_eke.py +++ b/mpas_analysis/ocean/climatology_map_eke.py @@ -37,13 +37,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, 
optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -239,9 +239,9 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using the lat/lon # coordinates - obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, - latVarName='Lat', - lonVarName='Lon') + obsDescriptor = LatLonGridDescriptor.read(filename=fileName, + lat_var_name='Lat', + lon_var_name='Lon') return obsDescriptor diff --git a/mpas_analysis/ocean/climatology_map_fluxes.py b/mpas_analysis/ocean/climatology_map_fluxes.py index c55de400e..5e503d8e1 100644 --- a/mpas_analysis/ocean/climatology_map_fluxes.py +++ b/mpas_analysis/ocean/climatology_map_fluxes.py @@ -36,13 +36,13 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) fluxType : str, optional diff --git a/mpas_analysis/ocean/climatology_map_mld.py b/mpas_analysis/ocean/climatology_map_mld.py index 0d4fbba67..3f49cd30e 100644 --- a/mpas_analysis/ocean/climatology_map_mld.py +++ b/mpas_analysis/ocean/climatology_map_mld.py @@ -38,13 +38,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -194,8 +194,8 @@ def get_observation_descriptor(self, 
fileName): # create a descriptor of the observation grid using the lat/lon # coordinates obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, - latVarName='lat', - lonVarName='lon') + lat_var_name='lat', + lon_var_name='lon') dsObs.close() return obsDescriptor diff --git a/mpas_analysis/ocean/climatology_map_mld_min_max.py b/mpas_analysis/ocean/climatology_map_mld_min_max.py index 1bc4b6b3c..63379dfc9 100644 --- a/mpas_analysis/ocean/climatology_map_mld_min_max.py +++ b/mpas_analysis/ocean/climatology_map_mld_min_max.py @@ -31,14 +31,14 @@ def __init__(self, config, mpasClimatologyTasks, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTasks : dict of ``MpasClimatologyTask`` The tasks that produced the climatology of monthly min and max to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors diff --git a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py index ba5522d64..e0a39b1c2 100644 --- a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py +++ b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py @@ -39,7 +39,7 @@ def __init__(self, config, mpas_climatology_task, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -49,7 +49,7 @@ def __init__(self, config, mpas_climatology_task, The task that produced the climatology from the first year to be remapped and then subtracted from the main climatology - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ @@ -79,9 +79,9 @@ def __init__(self, config, mpas_climatology_task, raise 
ValueError(f'config section {section_name} does not contain ' f'valid list of comparison grids') - depth_ranges = config.getexpression('climatologyMapOHCAnomaly', - 'depthRanges', - use_numpyfunc=True) + depth_ranges = config.getnumpy( + 'climatologyMapOHCAnomaly', 'depthRanges' + ) mpas_field_name = 'deltaOHC' @@ -295,8 +295,8 @@ def _compute_ohc(self, climatology): Compute the OHC from the temperature and layer thicknesses in a given climatology data sets. """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) # specific heat [J/(kg*degC)] cp = self.namelist.getfloat('config_specific_heat_sea_water') @@ -305,10 +305,10 @@ def _compute_ohc(self, climatology): units_scale_factor = 1e-9 - n_vert_levels = ds_restart.sizes['nVertLevels'] + n_vert_levels = ds_mesh.sizes['nVertLevels'] - z_mid = compute_zmid(ds_restart.bottomDepth, ds_restart.maxLevelCell-1, - ds_restart.layerThickness) + z_mid = compute_zmid(ds_mesh.bottomDepth, ds_mesh.maxLevelCell-1, + ds_mesh.layerThickness) vert_index = xr.DataArray.from_dict( {'dims': ('nVertLevels',), 'data': np.arange(n_vert_levels)}) @@ -316,7 +316,7 @@ def _compute_ohc(self, climatology): temperature = climatology['timeMonthly_avg_activeTracers_temperature'] layer_thickness = climatology['timeMonthly_avg_layerThickness'] - masks = [vert_index < ds_restart.maxLevelCell, + masks = [vert_index < ds_mesh.maxLevelCell, z_mid <= self.min_depth, z_mid >= self.max_depth] for mask in masks: diff --git a/mpas_analysis/ocean/climatology_map_schmidtko.py b/mpas_analysis/ocean/climatology_map_schmidtko.py index 903e11002..be8d983c3 100644 --- a/mpas_analysis/ocean/climatology_map_schmidtko.py +++ b/mpas_analysis/ocean/climatology_map_schmidtko.py @@ -47,13 +47,13 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : 
tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -270,7 +270,7 @@ def get_observation_descriptor(self, fileName): mesh_name = ds_obs.attrs['meshName'] obs_descriptor = ProjectionGridDescriptor.create( - projection, x=x, y=y, meshName=mesh_name) + projection, x=x, y=y, mesh_name=mesh_name) return obs_descriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/ocean/climatology_map_sose.py b/mpas_analysis/ocean/climatology_map_sose.py index 366cd5e13..c5cc9ec5f 100644 --- a/mpas_analysis/ocean/climatology_map_sose.py +++ b/mpas_analysis/ocean/climatology_map_sose.py @@ -46,13 +46,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors diff --git a/mpas_analysis/ocean/climatology_map_ssh.py b/mpas_analysis/ocean/climatology_map_ssh.py index 93462208e..4d7eb618c 100644 --- a/mpas_analysis/ocean/climatology_map_ssh.py +++ b/mpas_analysis/ocean/climatology_map_ssh.py @@ -39,13 +39,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options 
for a control run (if any) """ # Authors @@ -213,9 +213,9 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using the lat/lon # coordinates - obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, - latVarName='lat', - lonVarName='lon') + obsDescriptor = LatLonGridDescriptor.read(filename=fileName, + lat_var_name='lat', + lon_var_name='lon') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/ocean/climatology_map_sss.py b/mpas_analysis/ocean/climatology_map_sss.py index f737d3bc0..d267ed72e 100644 --- a/mpas_analysis/ocean/climatology_map_sss.py +++ b/mpas_analysis/ocean/climatology_map_sss.py @@ -38,13 +38,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -172,9 +172,9 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using the lat/lon # coordinates - obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, - latVarName='lat', - lonVarName='lon') + obsDescriptor = LatLonGridDescriptor.read(filename=fileName, + lat_var_name='lat', + lon_var_name='lon') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/ocean/climatology_map_sst.py b/mpas_analysis/ocean/climatology_map_sst.py index f7c901a5c..97d75906e 100644 --- a/mpas_analysis/ocean/climatology_map_sst.py +++ b/mpas_analysis/ocean/climatology_map_sst.py @@ -39,13 +39,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche 
Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -181,9 +181,9 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using the lat/lon # coordinates - obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, - latVarName='lat', - lonVarName='lon') + obsDescriptor = LatLonGridDescriptor.read(filename=fileName, + lat_var_name='lat', + lon_var_name='lon') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/ocean/climatology_map_vel.py b/mpas_analysis/ocean/climatology_map_vel.py index bdbfc9641..ede11385d 100644 --- a/mpas_analysis/ocean/climatology_map_vel.py +++ b/mpas_analysis/ocean/climatology_map_vel.py @@ -46,13 +46,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors diff --git a/mpas_analysis/ocean/climatology_map_waves.py b/mpas_analysis/ocean/climatology_map_waves.py index da139981a..0f31c6fb2 100644 --- a/mpas_analysis/ocean/climatology_map_waves.py +++ b/mpas_analysis/ocean/climatology_map_waves.py @@ -374,8 +374,8 @@ def get_observation_descriptor(self, fileName): # {{{ # coordinates dsObs = self.build_observational_dataset(fileName) obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, - latVarName='latitude', - lonVarName='longitude') + lat_var_name='latitude', + lon_var_name='longitude') return obsDescriptor # }}} def 
build_observational_dataset(self, fileName): # {{{ @@ -495,8 +495,8 @@ def get_observation_descriptor(self, fileName): # {{{ # coordinates dsObs = self.build_observational_dataset(fileName) obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, - latVarName='lat', - lonVarName='lon') + lat_var_name='lat', + lon_var_name='lon') return obsDescriptor # }}} def build_observational_dataset(self, fileName): # {{{ diff --git a/mpas_analysis/ocean/climatology_map_wind_stress_curl.py b/mpas_analysis/ocean/climatology_map_wind_stress_curl.py new file mode 100644 index 000000000..5f28a4872 --- /dev/null +++ b/mpas_analysis/ocean/climatology_map_wind_stress_curl.py @@ -0,0 +1,212 @@ +# This software is open source software available under the BSD-3 license. +# +# Copyright (c) 2022 Triad National Security, LLC. All rights reserved. +# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights +# reserved. +# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. +# +# Additional copyright and license information can be found in the LICENSE file +# distributed with this code, or at +# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE + +import xarray as xr + +from mpas_tools.ocean.streamfunction.vorticity import ( + compute_vertically_integrated_vorticity, +) + +from mpas_analysis.ocean.utility import ( + vector_cell_to_edge_isotropic, + vector_to_edge_normal, +) +from mpas_analysis.shared import AnalysisTask +from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask +from mpas_analysis.shared.plot import PlotClimatologyMapSubtask + + +class ClimatologyMapWindStressCurl(AnalysisTask): + """ + An analysis task for computing and plotting maps of the wind stress curl. + """ + + def __init__(self, config, mpas_climatology_task, control_config=None): + """ + Construct the analysis task. 
+ + Parameters + ---------- + config : tranche.Tranche + Configuration options + + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + + control_config : tranche.Tranche, optional + Configuration options for a control run (if any) + """ # noqa: E501 + + field_name = 'windStressCurl' + super().__init__( + config=config, + taskName='climatologyMapWindStressCurl', + componentName='ocean', + tags=[ + 'climatology', 'horizontalMap', field_name, 'publicObs' + ], + ) + + section_name = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(section_name, 'seasons') + if len(seasons) == 0: + raise ValueError( + f'config section {section_name} does not contain ' + f'valid list of seasons' + ) + + comparison_grid_names = config.getexpression( + section_name, 'comparisonGrids' + ) + + if len(comparison_grid_names) == 0: + raise ValueError( + f'config section {section_name} does not contain ' + f'valid list of comparison grids' + ) + + variable_list = list(RemapMpasWindStressCurl.VARIABLES) + remap_climatology_subtask = RemapMpasWindStressCurl( + mpasClimatologyTask=mpas_climatology_task, + parentTask=self, + climatologyName=field_name, + variableList=variable_list, + seasons=seasons, + comparisonGridNames=comparison_grid_names, + subtaskName='remapWindStressCurl', + vertices=True, + ) + + self.add_subtask(remap_climatology_subtask) + + out_file_label = field_name + field_title = 'Wind Stress Curl' + remap_observations_subtask = None + + mpas_field_name = field_name + if control_config is None: + ref_field_name = None + ref_title_label = None + diff_title_label = 'Model - Observations' + + else: + control_run_name = control_config.get('runs', 'mainRunName') + ref_field_name = mpas_field_name + ref_title_label = f'Control: {control_run_name}' + diff_title_label = 'Main - Control' + + for comparison_grid_name in comparison_grid_names: + for season in 
seasons: + # make a new subtask for this season and comparison grid + subtask_name = f'plot{season}_{comparison_grid_name}' + + subtask = PlotClimatologyMapSubtask( + self, season, comparison_grid_name, + remap_climatology_subtask, remap_observations_subtask, + controlConfig=control_config, subtaskName=subtask_name) + subtask.set_plot_info( + outFileLabel=out_file_label, + fieldNameInTitle=field_title, + mpasFieldName=mpas_field_name, + refFieldName=ref_field_name, + refTitleLabel=ref_title_label, + diffTitleLabel=diff_title_label, + unitsLabel=r'N m$^{-3}$', + imageCaption=field_title, + galleryGroup='Wind Stress Curl', + groupSubtitle=None, + groupLink='wsc', + galleryName=None, + configSectionName=section_name) + + self.add_subtask(subtask) + + +class RemapMpasWindStressCurl(RemapMpasClimatologySubtask): + """ + A subtask for computing climatologies of the wind stress curl before + it gets remapped to a comparison grid. + """ + + VARIABLES = ( + 'timeMonthly_avg_windStressZonal', + 'timeMonthly_avg_windStressMeridional', + ) + + def setup_and_check(self): + """ + Add the variables needed for computing wind stress curl to the + climatology task + """ + super().setup_and_check() + + # Add the variables and seasons, now that we have the variable list + self.mpasClimatologyTask.add_variables( + list(self.VARIABLES), self.seasons + ) + + def customize_masked_climatology(self, climatology, season): + """ + Compute the wind stress curl and add it to the climatology. 
+ + Parameters + ---------- + climatology : xarray.Dataset + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : xarray.Dataset + the modified climatology data set + """ + logger = self.logger + + ds_mesh = xr.open_dataset(self.meshFilename) + var_list = [ + 'verticesOnEdge', + 'cellsOnVertex', + 'kiteAreasOnVertex', + 'angleEdge', + 'areaTriangle', + 'dcEdge', + 'edgesOnVertex', + 'verticesOnEdge', + 'latVertex', + ] + ds_mesh = ds_mesh[var_list] + + ws_zonal_cell = climatology['timeMonthly_avg_windStressZonal'] + ws_meridional_cell = ( + climatology['timeMonthly_avg_windStressMeridional'] + ) + ws_zonal_edge, ws_meridional_edge = vector_cell_to_edge_isotropic( + ds_mesh, ws_zonal_cell, ws_meridional_cell + ) + ws_normal_edge = vector_to_edge_normal( + ds_mesh, ws_zonal_edge, ws_meridional_edge + ) + + # despite the name, this is the curl operator + wind_stress_curl, _ = compute_vertically_integrated_vorticity( + ds_mesh, ws_normal_edge, logger + ) + climatology['windStressCurl'] = wind_stress_curl + climatology['windStressCurl'].attrs['units'] = 'N m-3' + + # drop the original wind stress variables + climatology = climatology.drop_vars(list(self.VARIABLES)) + + return climatology diff --git a/mpas_analysis/ocean/climatology_map_woa.py b/mpas_analysis/ocean/climatology_map_woa.py index e7e34c1d1..3ca549bdc 100644 --- a/mpas_analysis/ocean/climatology_map_woa.py +++ b/mpas_analysis/ocean/climatology_map_woa.py @@ -47,13 +47,13 @@ def __init__(self, config, mpasClimatologyTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -310,8 +310,8 @@ def 
get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using Lat/Lon # coordinates obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, - latVarName='lat', - lonVarName='lon') + lat_var_name='lat', + lon_var_name='lon') dsObs.close() return obsDescriptor diff --git a/mpas_analysis/ocean/compute_transects_subtask.py b/mpas_analysis/ocean/compute_transects_subtask.py index 1f00d3cf3..b4b8ab0db 100644 --- a/mpas_analysis/ocean/compute_transects_subtask.py +++ b/mpas_analysis/ocean/compute_transects_subtask.py @@ -18,11 +18,15 @@ from mpas_tools.viz import mesh_to_triangles from mpas_tools.transects import subdivide_great_circle, \ cartesian_to_great_circle_distance -from mpas_tools.viz.transects import find_transect_cells_and_weights, \ +from mpas_tools.viz.transect.horiz import ( + find_spherical_transect_cells_and_weights, make_triangle_tree -from mpas_tools.ocean.transects import find_transect_levels_and_weights, \ - interp_mpas_to_transect_triangle_nodes, \ - interp_transect_grid_to_transect_triangle_nodes +) +from mpas_tools.ocean.viz.transect.vert import ( + find_transect_levels_and_weights, + interp_mpas_to_transect_nodes, + interp_transect_grid_to_transect_nodes +) from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \ get_climatology_op_directory @@ -31,7 +35,10 @@ make_directories from mpas_analysis.shared.io import write_netcdf_with_fill -from mpas_analysis.ocean.utility import compute_zmid +from mpas_analysis.ocean.utility import ( + compute_zinterface, + compute_zmid +) from mpas_analysis.shared.interpolation import interp_1d @@ -76,6 +83,11 @@ class ComputeTransectsSubtask(RemapMpasClimatologySubtask): zMid : ``xarray.DataArray`` Vertical coordinate at the center of layers, used to interpolate to reference depths + + zInterface : ``xarray.DataArray`` + Vertical coordinate at the interfaces between layers, used to + interpolate to reference depths + """ # Authors # ------- @@ -158,11 +170,12 @@ def 
__init__(self, mpasClimatologyTask, parentTask, climatologyName, self.collectionDescriptor = None self.maxLevelCell = None self.zMid = None + self.zInterface = None self.remap = self.obsDatasets.horizontalResolution != 'mpas' if self.obsDatasets.horizontalResolution == 'mpas' and \ self.verticalComparisonGridName != 'mpas': - raise ValueError('If the horizontal comparison grid is "mpas", the ' - 'vertical grid must also be "mpas".') + raise ValueError('If the horizontal comparison grid is "mpas", ' + 'the vertical grid must also be "mpas".') def setup_and_check(self): """ @@ -207,8 +220,8 @@ def setup_and_check(self): 'data': x}) self.collectionDescriptor = PointCollectionDescriptor( - lats, lons, collectionName=self.transectCollectionName, - units='degrees', outDimension='nPoints') + lats, lons, collection_name=self.transectCollectionName, + units='degrees', out_dimension='nPoints') self.add_comparison_grid_descriptor(self.transectCollectionName, self.collectionDescriptor) @@ -234,19 +247,27 @@ def run_task(self): # first, get maxLevelCell and zMid, needed for masking - dsMesh = xr.open_dataset(self.restartFileName) + dsMesh = xr.open_dataset(self.meshFilename) dsMesh = dsMesh.isel(Time=0) self.maxLevelCell = dsMesh.maxLevelCell - 1 if self.remap: - zMid = compute_zmid(dsMesh.bottomDepth, dsMesh.maxLevelCell-1, + zMid = compute_zmid(dsMesh.bottomDepth, self.maxLevelCell, dsMesh.layerThickness) self.zMid = \ xr.DataArray.from_dict({'dims': ('nCells', 'nVertLevels'), 'data': zMid}) + zInterface = compute_zinterface(dsMesh.bottomDepth, + self.maxLevelCell, + dsMesh.layerThickness) + + self.zInterface = \ + xr.DataArray.from_dict({'dims': ('nCells', 'nVertLevelsP1'), + 'data': zInterface}) + # then, call run from the base class (RemapMpasClimatologySubtask), # which will perform masking and possibly horizontal remapping super(ComputeTransectsSubtask, self).run_task() @@ -304,20 +325,35 @@ def customize_masked_climatology(self, climatology, season): # ------- # Xylar 
Asay-Davis - zIndex = xr.DataArray.from_dict( - {'dims': ('nVertLevels',), - 'data': numpy.arange(climatology.sizes['nVertLevels'])}) + maxLevel = { + 'nVertLevels': self.maxLevelCell, + 'nVertLevelsP1': self.maxLevelCell + 1 + } + + for vertDim in ['nVertLevels', 'nVertLevelsP1']: + if vertDim in climatology.dims: + zIndex = xr.DataArray.from_dict( + {'dims': (vertDim,), + 'data': numpy.arange(climatology.sizes[vertDim])}) - cellMask = zIndex <= self.maxLevelCell + mask = zIndex <= maxLevel[vertDim] - for variableName in self.variableList: - climatology[variableName] = \ - climatology[variableName].where(cellMask) + for variableName in self.variableList: + if vertDim in climatology[variableName].dims: + climatology[variableName] = \ + climatology[variableName].where(mask) if self.remap: - climatology['zMid'] = self.zMid + if 'nVertLevels' in climatology.dims: + climatology['zMid'] = self.zMid + if 'nVertLevelsP1' in climatology.dims: + climatology['zInterface'] = self.zInterface - climatology = climatology.transpose('nVertLevels', 'nCells') + transposeDims = ['nVertLevels', 'nVertLevelsP1', 'nCells'] + transposeDims = [dim for dim in transposeDims if dim in + climatology.dims] + + climatology = climatology.transpose(*transposeDims) return climatology @@ -354,10 +390,11 @@ def customize_remapped_climatology(self, climatology, comparisonGridNames, if 'nCells' in climatology.dims: climatology = climatology.rename({'nCells': 'nPoints'}) - dims = ['nPoints', 'nVertLevels'] - if 'nv' in climatology.dims: - dims.append('nv') - climatology = climatology.transpose(*dims) + transposeDims = ['nPoints', 'nVertLevels', 'nVertLevelsP1', 'nv'] + transposeDims = [dim for dim in transposeDims if dim in + climatology.dims] + + climatology = climatology.transpose(*transposeDims) return climatology @@ -395,29 +432,46 @@ def _vertical_interp(self, ds, transectIndex, dsObs, outFileName, ds = ds.where(ds.transectNumber == transectIndex, drop=True) if self.verticalComparisonGridName == 
'mpas': - z = ds.zMid - z = z.rename({'nVertLevels': 'nzOut'}) + z = ds['zMid'] + ds.rename({'nVertLevels': 'nzOut'}) elif self.verticalComparisonGridName == 'obs': z = dsObs.z z = z.rename({'nz': 'nzOut'}) else: # a defined vertical grid z = (('nzOut', ), self.verticalComparisonGrid) + ds['z'] = z + + for vertDim, vertCoord in ( + ('nVertLevels', 'zMid'), ('nVertLevelsP1', 'zInterface')): + if vertDim not in ds.dims: + continue + + if self.verticalComparisonGridName == 'mpas' and \ + vertDim == 'nVertLevels': + # no interpolation needed + continue - if self.verticalComparisonGridName == 'mpas': - ds = ds.rename({'zMid': 'z', 'nVertLevels': 'nz'}) - else: - ds['z'] = z # remap each variable - ds = interp_1d(ds, inInterpDim='nVertLevels', inInterpCoord='zMid', - outInterpDim='nzOut', outInterpCoord='z') - ds = ds.rename({'nzOut': 'nz'}) + ds = interp_1d( + ds, + inInterpDim=vertDim, + inInterpCoord=vertCoord, + outInterpDim='nzOut', + outInterpCoord='z', + ) + ds = ds.rename({'nzOut': 'nz'}) if self.verticalComparisonGridName != 'obs' and 'nz' in dsObs.dims: dsObs['zOut'] = z # remap each variable - dsObs = interp_1d(dsObs, inInterpDim='nz', inInterpCoord='z', - outInterpDim='nzOut', outInterpCoord='zOut') + dsObs = interp_1d( + dsObs, + inInterpDim='nz', + inInterpCoord='z', + outInterpDim='nzOut', + outInterpCoord='zOut', + ) dsObs = dsObs.rename({'nzOut': 'nz'}) write_netcdf_with_fill(dsObs, outObsFileName) @@ -471,6 +525,14 @@ def _compute_mpas_transects(self, dsMesh): dsTris = mesh_to_triangles(dsMesh) + layerThickness = dsMesh.layerThickness + bottomDepth = dsMesh.bottomDepth + maxLevelCell = dsMesh.maxLevelCell - 1 + if 'minLevelCell' in dsMesh: + minLevelCell = dsMesh.minLevelCell - 1 + else: + minLevelCell = xr.zeros_like(maxLevelCell) + triangleTree = make_triangle_tree(dsTris) for transectName in transectNames: @@ -493,22 +555,30 @@ def _compute_mpas_transects(self, dsMesh): else: transectZ = None - dsMpasTransect = find_transect_cells_and_weights( - 
dsTransect.lon, dsTransect.lat, dsTris, dsMesh, - triangleTree, degrees=True) + dsMpasTransect = find_spherical_transect_cells_and_weights( + lon_transect=dsTransect.lon, + lat_transect=dsTransect.lat, + ds_tris=dsTris, + ds_mesh=dsMesh, + tree=triangleTree, + degrees=True) dsMpasTransect = find_transect_levels_and_weights( - dsMpasTransect, dsMesh.layerThickness, - dsMesh.bottomDepth, dsMesh.maxLevelCell - 1, - transectZ) + ds_horiz_transect=dsMpasTransect, + layer_thickness=layerThickness, + bottom_depth=bottomDepth, + min_level_cell=minLevelCell, + max_level_cell=maxLevelCell, + z_transect=transectZ) if 'landIceFraction' in dsMesh: interpCellIndices = dsMpasTransect.interpHorizCellIndices interpCellWeights = dsMpasTransect.interpHorizCellWeights landIceFraction = dsMesh.landIceFraction.isel( nCells=interpCellIndices) - landIceFraction = (landIceFraction * interpCellWeights).sum( - dim='nHorizWeights') + landIceFraction = ( + landIceFraction * interpCellWeights).sum( + dim='nHorizWeights') dsMpasTransect['landIceFraction'] = landIceFraction # use to_netcdf rather than write_netcdf_with_fill because @@ -517,9 +587,7 @@ def _compute_mpas_transects(self, dsMesh): dsMpasTransect.to_netcdf(transectInfoFileName) dsTransectOnMpas = xr.Dataset(dsMpasTransect) - dsTransectOnMpas['x'] = dsMpasTransect.dNode.isel( - nSegments=dsMpasTransect.segmentIndices, - nHorizBounds=dsMpasTransect.nodeHorizBoundsIndices) + dsTransectOnMpas['x'] = dsMpasTransect.dNode dsTransectOnMpas['z'] = dsMpasTransect.zTransectNode @@ -543,11 +611,15 @@ def _compute_mpas_transects(self, dsMesh): dsOnMpas = xr.Dataset(dsMpasTransect) for var in dsMask.data_vars: dims = dsMask[var].dims - if 'nCells' in dims and 'nVertLevels' in dims: + if 'nCells' in dims and ( + 'nVertLevels' in dims or + 'nVertLevelsP1' in dims): dsOnMpas[var] = \ - interp_mpas_to_transect_triangle_nodes( + interp_mpas_to_transect_nodes( dsMpasTransect, dsMask[var]) + dsOnMpas = self._transpose(dsOnMpas) + outFileName = 
self.get_remapped_file_name( season, comparisonGridName=transectName) dsOnMpas.to_netcdf(outFileName) @@ -558,22 +630,47 @@ def _interp_obs_to_mpas(self, da, dsMpasTransect, threshold=0.1): """ daMask = da.notnull() da = da.where(daMask, 0.) - da = interp_transect_grid_to_transect_triangle_nodes( - dsMpasTransect, da) - daMask = interp_transect_grid_to_transect_triangle_nodes( - dsMpasTransect, daMask) + da = interp_transect_grid_to_transect_nodes( + ds_transect=dsMpasTransect, + da=da) + daMask = interp_transect_grid_to_transect_nodes( + ds_transect=dsMpasTransect, + da=daMask) da = (da / daMask).where(daMask > threshold) return da + @staticmethod + def _transpose(dsOnMpas): + """ + Transpose the data set to have the expected dimension order + """ + dims = dsOnMpas.dims + dimsTransposed = ['nPoints', 'nz', + 'nSegments', 'nHalfLevels', + 'nHorizLevels', 'nVertLevels', + 'nHorizWeights', 'nVertWeights'] + + # drop any dimensions not in the dataset + dimsTransposed = [dim for dim in dimsTransposed if dim in + dims] + # add any other dimensions at the end + for dim in dims: + if dim not in dimsTransposed: + dimsTransposed.append(dim) + dsOnMpas = dsOnMpas.transpose(*dimsTransposed) + + return dsOnMpas + class TransectsObservations(object): + """ A class for loading and manipulating transect observations Attributes ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options obsFileNames : OrderedDict @@ -603,7 +700,7 @@ def __init__(self, config, obsFileNames, horizontalResolution, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options obsFileNames : OrderedDict @@ -611,8 +708,9 @@ def __init__(self, config, obsFileNames, horizontalResolution, observations for a transect horizontalResolution : str - 'obs' for the obs as they are, 'mpas' for the native MPAS mesh, or a - size in km if subdivision of the observational transect is desired. 
+ 'obs' for the obs as they are, 'mpas' for the native MPAS mesh, or + a size in km if subdivision of the observational transect is + desired. transectCollectionName : str A name that describes the collection of transects (e.g. the name diff --git a/mpas_analysis/ocean/conservation.py b/mpas_analysis/ocean/conservation.py index 74fa3ad8d..14f354f47 100644 --- a/mpas_analysis/ocean/conservation.py +++ b/mpas_analysis/ocean/conservation.py @@ -39,10 +39,10 @@ class ConservationTask(AnalysisTask): Attributes ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche Contains configuration options for a control run, if provided outputFile : str @@ -98,7 +98,7 @@ def __init__(self, config, controlConfig): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options """ # Authors @@ -252,7 +252,7 @@ def run_task(self): for plot_type in self.plotTypes: for varname in self.variableList[plot_type]: all_plots_variable_list.append(varname) - self._compute_time_series_with_ncrcat(all_plots_variable_list) + self._compute_time_series_with_xarray(all_plots_variable_list) for plot_type in self.plotTypes: self._make_plot(plot_type) @@ -512,9 +512,8 @@ def _make_plot(self, plot_type): filePrefix=filePrefix, componentName='Ocean', componentSubdirectory='ocean', - galleryGroup='Time Series', - groupLink='timeseries', - gallery='Conservation', + galleryGroup='Conservation Time Series', + groupLink='conserv_timeseries', thumbnailDescription=title, imageDescription=caption, imageCaption=caption) @@ -604,28 +603,27 @@ def _get_variable(self, ds, varname, mks=False): return variable - def _compute_time_series_with_ncrcat(self, variable_list): - + def _check_output_safe_to_append(self, variable_list): """ - Uses ncrcat to extact time series from conservationCheckOutput files + Check if 
the output file is safe to append to by verifying the presence + of necessary variables and determining if new input files are needed. - Raises - ------ - OSError - If ``ncrcat`` is not in the system path. + Parameters + ---------- + variable_list : list of str + List of variables to include in the time series. + + Returns + ------- + append : bool + True if the output file can be safely appended to, False otherwise. + inputFiles : list of str + Updated list of input files to process. """ - - if shutil.which('ncrcat') is None: - raise OSError('ncrcat not found. Make sure the latest nco ' - 'package is installed: \n' - 'conda install nco\n' - 'Note: this presumes use of the conda-forge ' - 'channel.') - - inputFiles = self.inputFiles append = False + inputFiles = self.inputFiles + if os.path.exists(self.outputFile): - # make sure all the necessary variables are also present with xr.open_dataset(self.outputFile) as ds: if ds.sizes['Time'] == 0: updateSubset = False @@ -637,11 +635,7 @@ def _compute_time_series_with_ncrcat(self, variable_list): break if updateSubset: - # add only input files with times that aren't already in - # the output file - append = True - fileNames = sorted(self.inputFiles) inYears, inMonths = get_files_year_month( fileNames, self.historyStreams, @@ -652,33 +646,51 @@ def _compute_time_series_with_ncrcat(self, variable_list): totalMonths = 12 * inYears + inMonths dates = decode_strings(ds.xtime) - lastDate = dates[-1] - lastYear = int(lastDate[0:4]) lastMonth = int(lastDate[5:7]) lastTotalMonths = 12 * lastYear + lastMonth - inputFiles = [] - for index, inputFile in enumerate(fileNames): - if totalMonths[index] > lastTotalMonths: - inputFiles.append(inputFile) + inputFiles = [ + inputFile for index, inputFile in enumerate(fileNames) + if totalMonths[index] > lastTotalMonths + ] if len(inputFiles) == 0: - # nothing to do - return + return append, inputFiles else: - # there is an output file but it has the wrong variables - # so we need ot delete 
it. - self.logger.warning('Warning: deleting file {self.outputFile}' - ' because it is empty or some variables' - ' were missing') + self.logger.warning( + f'Warning: deleting file {self.outputFile} because it ' + 'is empty or some variables were missing') os.remove(self.outputFile) - variableList = variable_list + ['xtime'] + return append, inputFiles + + def _compute_time_series_with_ncrcat(self, variable_list): + """ + Uses ncrcat to extract time series from conservationCheckOutput files. + + Raises + ------ + OSError + If ``ncrcat`` is not in the system path. + """ + if shutil.which('ncrcat') is None: + raise OSError('ncrcat not found. Make sure the latest nco ' + 'package is installed: \n' + 'conda install nco\n' + 'Note: this presumes use of the conda-forge ' + 'channel.') + + variable_list = variable_list + ['xtime'] + append, inputFiles = self._check_output_safe_to_append(variable_list) + + if len(inputFiles) == 0: + # nothing to do + return args = ['ncrcat', '-4', '--no_tmp_fl', - '-v', ','.join(variableList)] + '-v', ','.join(variable_list)] if append: args.append('--record_append') @@ -710,3 +722,77 @@ def _compute_time_series_with_ncrcat(self, variable_list): if process.returncode != 0: raise subprocess.CalledProcessError(process.returncode, ' '.join(args)) + + def _compute_time_series_with_xarray(self, variable_list): + """ + Uses xarray to extract time series from conservationCheckOutput files, + handling redundant `xtime` entries and sorting by `xtime`. + + Parameters + ---------- + variable_list : list of str + List of variables to include in the time series. + """ + # Authors + # ------- + # Xylar Asay-Davis + + inputFiles = self.inputFiles + variable_list = variable_list + ['xtime'] + + append, inputFiles = self._check_output_safe_to_append(variable_list) + + if len(inputFiles) == 0: + # nothing to do + return + + # Open all input files as a single dataset + self.logger.info( + f'Opening input files with xarray: {inputFiles[0]} ... 
' + f'{inputFiles[-1]}') + ds = xr.open_mfdataset( + inputFiles, + combine='nested', + concat_dim='Time', + data_vars='minimal', + coords='minimal', + compat='override' + ) + + # Select only the requested variables + ds = ds[variable_list] + + # Handle redundant `xtime` entries by keeping the last occurrence + self.logger.info('Removing redundant xtime entries...') + _, unique_indices = np.unique(ds['xtime'].values, return_index=True) + unique_indices = sorted(unique_indices) # Ensure ascending order + ds = ds.isel(Time=unique_indices) + + # seeing hanging during saving. Let's try loading + ds.load() + + if append: + # Load the existing dataset and combine it with the new dataset + self.logger.info( + f'Appending to existing dataset in {self.outputFile}...') + with xr.open_dataset(self.outputFile) as existing_ds: + ds = xr.concat([existing_ds, ds], dim='Time') + # Remove redundant `xtime` entries again after concatenation + _, unique_indices = np.unique( + ds['xtime'].values, return_index=True) + unique_indices = sorted(unique_indices) + ds = ds.isel(Time=unique_indices) + + # Sort by `xtime` to ensure the time series is in ascending order + self.logger.info('Sorting by xtime...') + ds = ds.sortby('xtime') + + # again, seeing hanging during saving. Let's try loading + ds.load() + + # Save the resulting dataset to the output file + self.logger.info( + f'Saving concatenated dataset to {self.outputFile}...') + ds.to_netcdf(self.outputFile, format='NETCDF4') + + self.logger.info('Time series successfully created with xarray.') diff --git a/mpas_analysis/ocean/geojson_netcdf_transects.py b/mpas_analysis/ocean/geojson_netcdf_transects.py new file mode 100644 index 000000000..298c6f297 --- /dev/null +++ b/mpas_analysis/ocean/geojson_netcdf_transects.py @@ -0,0 +1,334 @@ +# This software is open source software available under the BSD-3 license. +# +# Copyright (c) 2022 Triad National Security, LLC. All rights reserved. 
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights +# reserved. +# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. +# +# Additional copyright and license information can be found in the LICENSE file +# distributed with this code, or at +# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE +import json +from pathlib import Path + +import numpy as np +import xarray as xr + +from mpas_analysis.ocean.compute_transects_subtask import ( + ComputeTransectsSubtask, + TransectsObservations +) +from mpas_analysis.ocean.plot_transect_subtask import PlotTransectSubtask +from mpas_analysis.shared import AnalysisTask + + +class GeojsonNetcdfTransects(AnalysisTask): + """ + Plot model output at transects defined by lat/lon points in a geojson or + NetCDF file + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, config, mpasClimatologyTask, controlConfig=None): + """ + Construct the analysis task and adds it as a subtask of the + ``parentTask``. 
+ + Parameters + ---------- + config : tranche.Tranche + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + as a transect + + controlconfig : tranche.Tranche, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + tags = ['climatology', 'transect', 'geojson', 'netcdf'] + + # call the constructor from the base class (AnalysisTask) + super().__init__( + config=config, taskName='geojsonNetcdfTransects', + componentName='ocean', + tags=tags) + + sectionName = self.taskName + + geojsonOrNetcdfFiles = config.getexpression(sectionName, + 'geojsonOrNetcdfFiles') + if len(geojsonOrNetcdfFiles) == 0: + return + + seasons = config.getexpression(sectionName, 'seasons') + + horizontalResolution = config.get(sectionName, 'horizontalResolution') + + verticalComparisonGridName = config.get(sectionName, + 'verticalComparisonGridName') + + if verticalComparisonGridName in ['mpas', 'obs']: + verticalComparisonGrid = None + else: + verticalComparisonGrid = config.getnumpy( + sectionName, 'verticalComparisonGrid' + ) + + availableVariables = config.getexpression( + sectionName, 'availableVariables') + + prefixes = config.getexpression(sectionName, 'variables') + fields = [field for field in availableVariables + if field['prefix'] in prefixes] + + geojsonFileNames = {} + netcdfFileNames = {} + transectNames = [] + for fileName in geojsonOrNetcdfFiles: + ext = Path(fileName).suffix + if ext == '.nc': + transectName = Path(fileName).stem + netcdfFileNames[transectName] = fileName + elif ext == '.geojson': + with open(fileName) as filePointer: + jsonFile = json.load(filePointer) + + for feature in jsonFile['features']: + if feature['geometry']['type'] != 'LineString': + continue + transectName = feature['properties']['name'] + + geojsonFileNames[transectName] = fileName + else: + raise ValueError(f'Unexptect file extension: {ext}') + + if 
transectName in transectNames: + raise ValueError(f'Transect name {transectName} is repeated.') + + transectNames.append(transectName) + + variableList = [field['mpas'] for field in fields] + + computeGeojsonTransectsSubtask = None + computeNetcdfTransectsSubtask = None + + if geojsonFileNames: + transectCollectionName = 'geojson_transects' + if horizontalResolution != 'obs' and \ + horizontalResolution != 'mpas': + transectCollectionName = \ + f'{transectCollectionName}_{horizontalResolution}km' + + geojsonObservations = GeojsonTransectsObservations( + config, geojsonFileNames, horizontalResolution, + transectCollectionName) + + computeGeojsonTransectsSubtask = ComputeTransectsSubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='geojson', + transectCollectionName=transectCollectionName, + variableList=variableList, + seasons=seasons, + obsDatasets=geojsonObservations, + verticalComparisonGridName=verticalComparisonGridName, + verticalComparisonGrid=verticalComparisonGrid, + subtaskName='remapGeojson') + + if netcdfFileNames: + transectCollectionName = 'netcdf_transects' + if horizontalResolution != 'obs' and \ + horizontalResolution != 'mpas': + transectCollectionName = \ + f'{transectCollectionName}_{horizontalResolution}km' + + netcdfObservations = NetcdfTransectsObservations( + config, netcdfFileNames, horizontalResolution, + transectCollectionName) + + computeNetcdfTransectsSubtask = ComputeTransectsSubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='netcdf', + transectCollectionName=transectCollectionName, + variableList=variableList, + seasons=seasons, + obsDatasets=netcdfObservations, + verticalComparisonGridName=verticalComparisonGridName, + verticalComparisonGrid=verticalComparisonGrid, + subtaskName='remapNetcdf') + + for field in fields: + for transectName in geojsonFileNames: + for season in seasons: + self._add_plot_subtasks( + field=field, season=season, + 
transectName=transectName, + computeSubtask=computeGeojsonTransectsSubtask, + galleryGroup='Geojson Transects', + groupLink='geojson', + controlConfig=controlConfig) + + for transectName in netcdfFileNames: + for season in seasons: + self._add_plot_subtasks( + field=field, season=season, + transectName=transectName, + computeSubtask=computeNetcdfTransectsSubtask, + galleryGroup='NetCDF Transects', + groupLink='nctransects', + controlConfig=controlConfig) + + def _add_plot_subtasks(self, field, season, transectName, computeSubtask, + galleryGroup, groupLink, controlConfig): + """ + Add a sbutask for plotting the given transect, field and season + """ + config = self.config + sectionName = self.taskName + verticalBounds = config.getexpression(sectionName, 'verticalBounds') + + fieldPrefix = field['prefix'] + + outFileLabel = fieldPrefix + + if controlConfig is None: + refFieldName = None + refTitleLabel = None + diffTitleLabel = None + else: + refFieldName = field['mpas'] + controlRunName = controlConfig.get('runs', 'mainRunName') + refTitleLabel = f'Control: {controlRunName}' + diffTitleLabel = 'Main - Control' + + fieldPrefixUpper = fieldPrefix[0].upper() + fieldPrefix[1:] + fieldNameInTitle = field['titleName'] + transectNameInTitle = transectName.replace('_', ' ') + fieldNameInTitle = \ + f'{fieldNameInTitle} from {transectNameInTitle}' + + configSectionName = f'geojsonNetcdf{fieldPrefixUpper}Transects' + + # make a new subtask for this season and comparison grid + subtask = PlotTransectSubtask( + parentTask=self, + season=season, + transectName=transectName, + fieldName=fieldPrefix, + computeTransectsSubtask=computeSubtask, + plotObs=False, + controlConfig=controlConfig) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle=fieldNameInTitle, + mpasFieldName=field['mpas'], + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=field['units'], + imageCaption=fieldNameInTitle, + 
galleryGroup=galleryGroup, + groupSubtitle=None, + groupLink=groupLink, + galleryName=field['titleName'], + configSectionName=configSectionName, + verticalBounds=verticalBounds) + + self.add_subtask(subtask) + + +class GeojsonTransectsObservations(TransectsObservations): + """ + A class for loading and manipulating geojson transects + """ + # Authors + # ------- + # Xylar Asay-Davis + + def build_observational_dataset(self, fileName, transectName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + transectName : str + transect name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis + + with open(fileName) as filePointer: + jsonFile = json.load(filePointer) + + for feature in jsonFile['features']: + if feature['properties']['name'] != transectName: + continue + assert feature['geometry']['type'] == 'LineString' + + coordinates = feature['geometry']['coordinates'] + lon, lat = zip(*coordinates) + break + + dsObs = xr.Dataset() + dsObs['lon'] = (('nPoints',), np.array(lon)) + dsObs.lon.attrs['units'] = 'degrees' + dsObs['lat'] = (('nPoints',), np.array(lat)) + dsObs.lat.attrs['units'] = 'degrees' + + return dsObs + + +class NetcdfTransectsObservations(TransectsObservations): + """ + A class for loading and manipulating netcdf transects + """ + # Authors + # ------- + # Xylar Asay-Davis + + def build_observational_dataset(self, fileName, transectName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + transectName : str + transect name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis + + dsObs = xr.open_dataset(fileName) + # drop all variables besides lon and lat + dsObs 
= dsObs[['lon', 'lat']] + + return dsObs diff --git a/mpas_analysis/ocean/geojson_transects.py b/mpas_analysis/ocean/geojson_transects.py deleted file mode 100644 index df62d3512..000000000 --- a/mpas_analysis/ocean/geojson_transects.py +++ /dev/null @@ -1,223 +0,0 @@ -# This software is open source software available under the BSD-3 license. -# -# Copyright (c) 2022 Triad National Security, LLC. All rights reserved. -# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights -# reserved. -# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. -# -# Additional copyright and license information can be found in the LICENSE file -# distributed with this code, or at -# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE -from collections import OrderedDict -import json -import xarray -import numpy - -from mpas_analysis.shared import AnalysisTask -from mpas_analysis.ocean.compute_transects_subtask import \ - ComputeTransectsSubtask, TransectsObservations - -from mpas_analysis.ocean.plot_transect_subtask import PlotTransectSubtask - - -class GeojsonTransects(AnalysisTask): - """ - Plot model output at transects defined by lat/lon points in a geojson file - """ - # Authors - # ------- - # Xylar Asay-Davis - - def __init__(self, config, mpasClimatologyTask, controlConfig=None): - """ - Construct the analysis task and adds it as a subtask of the - ``parentTask``. 
- - Parameters - ---------- - config : mpas_tools.config.MpasConfigParser - Configuration options - - mpasClimatologyTask : ``MpasClimatologyTask`` - The task that produced the climatology to be remapped and plotted - as a transect - - controlconfig : mpas_tools.config.MpasConfigParser, optional - Configuration options for a control run (if any) - """ - # Authors - # ------- - # Xylar Asay-Davis - - tags = ['climatology', 'transect', 'geojson'] - - # call the constructor from the base class (AnalysisTask) - super(GeojsonTransects, self).__init__( - config=config, taskName='geojsonTransects', - componentName='ocean', - tags=tags) - - sectionName = self.taskName - - geojsonFiles = config.getexpression(sectionName, 'geojsonFiles') - if len(geojsonFiles) == 0: - return - - seasons = config.getexpression(sectionName, 'seasons') - - horizontalResolution = config.get(sectionName, 'horizontalResolution') - - verticalComparisonGridName = config.get(sectionName, - 'verticalComparisonGridName') - - if verticalComparisonGridName in ['mpas', 'obs']: - verticalComparisonGrid = None - else: - verticalComparisonGrid = config.getexpression( - sectionName, 'verticalComparisonGrid', use_numpyfunc=True) - - verticalBounds = config.getexpression(sectionName, 'verticalBounds') - - fields = config.getexpression(sectionName, 'fields') - - obsFileNames = OrderedDict() - for fileName in geojsonFiles: - with open(fileName) as filePointer: - jsonFile = json.load(filePointer) - - for feature in jsonFile['features']: - if feature['geometry']['type'] != 'LineString': - continue - transectName = feature['properties']['name'] - - obsFileNames[transectName] = fileName - - transectCollectionName = 'geojson_transects' - if horizontalResolution != 'obs': - transectCollectionName = '{}_{}km'.format(transectCollectionName, - horizontalResolution) - - transectsObservations = GeojsonTransectsObservations( - config, obsFileNames, horizontalResolution, - transectCollectionName) - - computeTransectsSubtask = 
ComputeTransectsSubtask( - mpasClimatologyTask=mpasClimatologyTask, - parentTask=self, - climatologyName='geojson', - transectCollectionName=transectCollectionName, - variableList=[field['mpas'] for field in fields], - seasons=seasons, - obsDatasets=transectsObservations, - verticalComparisonGridName=verticalComparisonGridName, - verticalComparisonGrid=verticalComparisonGrid) - - plotObs = False - if controlConfig is None: - - refTitleLabel = None - - diffTitleLabel = None - - else: - controlRunName = controlConfig.get('runs', 'mainRunName') - refTitleLabel = 'Control: {}'.format(controlRunName) - - diffTitleLabel = 'Main - Control' - - for field in fields: - fieldPrefix = field['prefix'] - for transectName in obsFileNames: - for season in seasons: - outFileLabel = fieldPrefix - if controlConfig is None: - refFieldName = None - else: - refFieldName = field['mpas'] - - fieldPrefixUpper = fieldPrefix[0].upper() + fieldPrefix[1:] - fieldNameInTytle = '{} from {}'.format( - field['titleName'], - transectName.replace('_', ' ')) - - # make a new subtask for this season and comparison grid - subtask = PlotTransectSubtask(self, season, transectName, - fieldPrefix, - computeTransectsSubtask, - plotObs, controlConfig) - - subtask.set_plot_info( - outFileLabel=outFileLabel, - fieldNameInTitle=fieldNameInTytle, - mpasFieldName=field['mpas'], - refFieldName=refFieldName, - refTitleLabel=refTitleLabel, - diffTitleLabel=diffTitleLabel, - unitsLabel=field['units'], - imageCaption=fieldNameInTytle, - galleryGroup='Geojson Transects', - groupSubtitle=None, - groupLink='geojson', - galleryName=field['titleName'], - configSectionName='geojson{}Transects'.format( - fieldPrefixUpper), - verticalBounds=verticalBounds) - - self.add_subtask(subtask) - - -class GeojsonTransectsObservations(TransectsObservations): - """ - A class for loading and manipulating geojson transects - - Attributes - ---------- - - obsDatasets : OrderedDict - A dictionary of observational datasets - """ - # Authors 
- # ------- - # Xylar Asay-Davis - - def build_observational_dataset(self, fileName, transectName): - """ - read in the data sets for observations, and possibly rename some - variables and dimensions - - Parameters - ---------- - fileName : str - observation file name - - transectName : str - transect name - - Returns - ------- - dsObs : ``xarray.Dataset`` - The observational dataset - """ - # Authors - # ------- - # Xylar Asay-Davis - - with open(fileName) as filePointer: - jsonFile = json.load(filePointer) - - for feature in jsonFile['features']: - if feature['properties']['name'] != transectName: - continue - assert(feature['geometry']['type'] == 'LineString') - - coordinates = feature['geometry']['coordinates'] - lon, lat = zip(*coordinates) - break - - dsObs = xarray.Dataset() - dsObs['lon'] = (('nPoints',), numpy.array(lon)) - dsObs.lon.attrs['units'] = 'degrees' - dsObs['lat'] = (('nPoints',), numpy.array(lat)) - dsObs.lat.attrs['units'] = 'degrees' - - return dsObs diff --git a/mpas_analysis/ocean/histogram.py b/mpas_analysis/ocean/histogram.py index 92b2878aa..d7f94f9ad 100644 --- a/mpas_analysis/ocean/histogram.py +++ b/mpas_analysis/ocean/histogram.py @@ -42,7 +42,7 @@ def __init__(self, config, mpasClimatologyTask, regionMasksTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` @@ -51,7 +51,7 @@ def __init__(self, config, mpasClimatologyTask, regionMasksTask, regionMasksTask : ``ComputeRegionMasks`` A task for computing region masks - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche Configuration options for a control run (if any) """ @@ -247,10 +247,10 @@ def run_task(self): ds_mask = ds_region_mask.isel(nRegions=region_index) cell_mask = ds_mask.regionCellMasks == 1 - # Open the restart file, which contains unmasked weight variables - restart_filename = self.runStreams.readpath('restart')[0] - 
ds_restart = xarray.open_dataset(restart_filename) - ds_restart = ds_restart.isel(Time=0) + # Open the mesh file, which contains unmasked weight variables + mesh_filename = self.get_mesh_filename() + ds_mesh = xarray.open_dataset(mesh_filename) + ds_mesh = ds_mesh.isel(Time=0) # Save the cell mask only for the region in its own file, which may be # referenced by future analysis (i.e., as a control run) @@ -263,13 +263,15 @@ def run_task(self): # Fetch the weight variables and mask them for each region for index, var in enumerate(self.variableList): weight_var_name = self.weightList[index] - if weight_var_name in ds_restart.keys(): + if weight_var_name in ds_mesh.keys(): var_name = f'timeMonthly_avg_{var}' ds_weights[f'{var_name}_weight'] = \ - ds_restart[weight_var_name].where(cell_mask, drop=True) + ds_mesh[weight_var_name].where(cell_mask, drop=True) else: - self.logger.warn(f'Weight variable {weight_var_name} is ' - f'not in the restart file, skipping') + self.logger.warning( + f'Weight variable {weight_var_name} is ' + f'not in the mesh file, skipping' + ) weights_filename = \ f'{base_directory}/{self.filePrefix}_{self.regionName}_weights.nc' @@ -291,7 +293,7 @@ class PlotRegionHistogramSubtask(AnalysisTask): sectionName : str The section of the config file to get options from - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche The configuration options for the control run (if any) mpasClimatologyTask : ``MpasClimatologyTask`` @@ -331,7 +333,7 @@ def __init__(self, parentTask, regionGroup, regionName, controlConfig, regionName : str Name of the region to plot - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) sectionName : str diff --git a/mpas_analysis/ocean/hovmoller_ocean_regions.py b/mpas_analysis/ocean/hovmoller_ocean_regions.py index d971a2fff..7a677f2e0 100644 --- a/mpas_analysis/ocean/hovmoller_ocean_regions.py +++ 
b/mpas_analysis/ocean/hovmoller_ocean_regions.py @@ -48,7 +48,7 @@ def __init__(self, config, regionMasksTask, oceanRegionalProfilesTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options regionMasksTask : ``ComputeRegionMasks`` @@ -57,7 +57,7 @@ def __init__(self, config, regionMasksTask, oceanRegionalProfilesTask, oceanRegionalProfilesTask : mpas_analysis.ocean.OceanRegionalProfiles A task for computing ocean regional profiles - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -76,6 +76,7 @@ def __init__(self, config, regionMasksTask, oceanRegionalProfilesTask, regionGroups = config.getexpression('hovmollerOceanRegions', 'regionGroups') + anyAnomalies = False for regionGroup in regionGroups: suffix = regionGroup[0].upper() + regionGroup[1:].replace(' ', '') @@ -87,6 +88,8 @@ def __init__(self, config, regionMasksTask, oceanRegionalProfilesTask, computeAnomaly = config.getboolean(regionGroupSection, 'computeAnomaly') + if computeAnomaly: + anyAnomalies = True fields = config.getexpression(regionGroupSection, 'fields') @@ -185,6 +188,8 @@ def __init__(self, config, regionMasksTask, oceanRegionalProfilesTask, self.add_subtask(hovmollerSubtask) self.run_after(oceanRegionalProfilesTask) + if anyAnomalies: + self.tags.append('anomaly') class ComputeHovmollerAnomalySubtask(AnalysisTask): diff --git a/mpas_analysis/ocean/index_nino34.py b/mpas_analysis/ocean/index_nino34.py index d4b018e70..b3cd49136 100644 --- a/mpas_analysis/ocean/index_nino34.py +++ b/mpas_analysis/ocean/index_nino34.py @@ -52,7 +52,7 @@ class IndexNino34(AnalysisTask): mpasTimeSeriesTask : ``MpasTimeSeriesTask`` The task that extracts the time series from MPAS monthly output - controlconfig : mpas_tools.config.MpasConfigParser + controlconfig : tranche.Tranche Configuration options for a control run (if 
any) """ # Authors @@ -66,13 +66,13 @@ def __init__(self, config, mpasTimeSeriesTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasTimeSeriesTask : ``MpasTimeSeriesTask`` The task that extracts the time series from MPAS monthly output - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors diff --git a/mpas_analysis/ocean/meridional_heat_transport.py b/mpas_analysis/ocean/meridional_heat_transport.py index cfe2a69e6..6534a03dd 100644 --- a/mpas_analysis/ocean/meridional_heat_transport.py +++ b/mpas_analysis/ocean/meridional_heat_transport.py @@ -34,7 +34,7 @@ class MeridionalHeatTransport(AnalysisTask): mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser + controlconfig : tranche.Tranche Configuration options for a control run (if any) """ @@ -48,13 +48,13 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -181,14 +181,10 @@ def run_task(self): # Read in depth and MHT latitude points # Latitude is from binBoundaryMerHeatTrans - try: - restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least ' - 'one for MHT calcuation') - - with xr.open_dataset(restartFileName) as dsRestart: - refBottomDepth = dsRestart.refBottomDepth + meshFilename = 
self.get_mesh_filename() + + with xr.open_dataset(meshFilename) as dsMesh: + refBottomDepth = dsMesh.refBottomDepth nVertLevels = refBottomDepth.sizes['nVertLevels'] refLayerThickness = np.zeros(nVertLevels) diff --git a/mpas_analysis/ocean/ocean_regional_profiles.py b/mpas_analysis/ocean/ocean_regional_profiles.py index 470ae5cd2..aac0df441 100644 --- a/mpas_analysis/ocean/ocean_regional_profiles.py +++ b/mpas_analysis/ocean/ocean_regional_profiles.py @@ -50,13 +50,13 @@ def __init__(self, config, regionMasksTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options regionMasksTask : ``ComputeRegionMasks`` A task for computing region masks - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -344,22 +344,22 @@ def run_task(self): return # get areaCell - restartFileName = \ - self.runStreams.readpath('restart')[0] + meshFilename = self.get_mesh_filename() - dsRestart = xr.open_dataset(restartFileName) - dsRestart = dsRestart.isel(Time=0) - areaCell = dsRestart.areaCell + dsMesh = xr.open_dataset(meshFilename) + dsMesh = dsMesh.isel(Time=0) + areaCell = dsMesh.areaCell + openOceanMask = xr.where(dsMesh.landIceMask > 0, 0, 1) - nVertLevels = dsRestart.sizes['nVertLevels'] + nVertLevels = dsMesh.sizes['nVertLevels'] vertIndex = \ xr.DataArray.from_dict({'dims': ('nVertLevels',), 'data': np.arange(nVertLevels)}) - vertMask = vertIndex < dsRestart.maxLevelCell + vertMask = vertIndex < dsMesh.maxLevelCell if self.max_bottom_depth is not None: - depthMask = dsRestart.bottomDepth < self.max_bottom_depth + depthMask = dsMesh.bottomDepth < self.max_bottom_depth vertDepthMask = np.logical_and(vertMask, depthMask) else: vertDepthMask = vertMask @@ -383,7 +383,9 @@ def run_task(self): cellMasks = dsRegionMask.regionCellMasks regionNamesVar = dsRegionMask.regionNames - 
totalArea = self._masked_area_sum(cellMasks, areaCell, vertDepthMask) + totalArea = self._masked_area_sum( + cellMasks, openOceanMask, areaCell, vertDepthMask + ) datasets = [] for timeIndex, fileName in enumerate(inputFiles): @@ -411,13 +413,18 @@ def run_task(self): var = dsLocal[variableName].where(vertDepthMask) meanName = '{}_mean'.format(prefix) - dsLocal[meanName] = \ - self._masked_area_sum(cellMasks, areaCell, var) / totalArea + dsLocal[meanName] = ( + self._masked_area_sum( + cellMasks, openOceanMask, areaCell, var + ) / totalArea + ) meanSquaredName = '{}_meanSquared'.format(prefix) - dsLocal[meanSquaredName] = \ - self._masked_area_sum(cellMasks, areaCell, var**2) / \ - totalArea + dsLocal[meanSquaredName] = ( + self._masked_area_sum( + cellMasks, openOceanMask, areaCell, var**2 + ) / totalArea + ) # drop the original variables dsLocal = dsLocal.drop_vars(variableList) @@ -436,14 +443,10 @@ def run_task(self): # Note: restart file, not a mesh file because we need refBottomDepth, # not in a mesh file - try: - restartFile = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least one ' - 'restart file for plotting time series vs. 
depth') + meshFilename = self.get_mesh_filename() - with xr.open_dataset(restartFile) as dsRestart: - depths = dsRestart.refBottomDepth.values + with xr.open_dataset(meshFilename) as dsMesh: + depths = dsMesh.refBottomDepth.values z = np.zeros(depths.shape) z[0] = -0.5 * depths[0] z[1:] = -0.5 * (depths[0:-1] + depths[1:]) @@ -454,12 +457,15 @@ def run_task(self): write_netcdf_with_fill(dsOut, outputFileName) @staticmethod - def _masked_area_sum(cellMasks, areaCell, var): + def _masked_area_sum(cellMasks, openOceanMask, areaCell, var): """sum a variable over the masked areas""" nRegions = cellMasks.sizes['nRegions'] totals = [] for index in range(nRegions): - mask = cellMasks.isel(nRegions=slice(index, index+1)) + mask = ( + cellMasks.isel(nRegions=slice(index, index+1)) * + openOceanMask + ) totals.append((mask * areaCell * var).sum('nCells')) total = xr.concat(totals, 'nRegions') @@ -621,7 +627,7 @@ class PlotRegionalProfileTimeSeriesSubtask(AnalysisTask): field : dict Information about the field (e.g. 
temperature) being plotted - controlconfig : mpas_tools.config.MpasConfigParser + controlconfig : tranche.Tranche Configuration options for a control run (if any) """ # Authors @@ -658,7 +664,7 @@ def __init__(self, parentTask, masksSubtask, season, regionName, field, startYear, endYear : int The beginning and end of the time series to compute - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors diff --git a/mpas_analysis/ocean/osnap_transects.py b/mpas_analysis/ocean/osnap_transects.py index c04d5801b..e941e92fe 100644 --- a/mpas_analysis/ocean/osnap_transects.py +++ b/mpas_analysis/ocean/osnap_transects.py @@ -33,14 +33,14 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted as a transect - controlConfig : mpas_tools.config.MpasConfigParser, optional + controlConfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -67,8 +67,9 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): if verticalComparisonGridName in ['mpas', 'obs']: verticalComparisonGrid = None else: - verticalComparisonGrid = config.getexpression( - sectionName, 'verticalComparisonGrid', use_numpyfunc=True) + verticalComparisonGrid = config.getnumpy( + sectionName, 'verticalComparisonGrid' + ) verticalBounds = config.getexpression(sectionName, 'verticalBounds') diff --git a/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.py b/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.py index 57d18df16..4f2021dff 100644 --- a/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.py +++ b/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.py @@ 
-84,7 +84,7 @@ class PlotDepthIntegratedTimeSeriesSubtask(AnalysisTask): galleryName : str The name of the gallery in which this plot belongs - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche The configuration options for the control run (if any) """ # Authors @@ -150,7 +150,7 @@ def __init__(self, parentTask, regionName, inFileName, outFileLabel, subtaskName : str, optional The name of the subtask (``plotTimeSeries`` by default) - controlConfig : mpas_tools.config.MpasConfigParser, optional + controlConfig : tranche.Tranche, optional The configuration options for the control run (if any) """ # Authors diff --git a/mpas_analysis/ocean/plot_hovmoller_subtask.py b/mpas_analysis/ocean/plot_hovmoller_subtask.py index b590263bb..91a4fb70a 100644 --- a/mpas_analysis/ocean/plot_hovmoller_subtask.py +++ b/mpas_analysis/ocean/plot_hovmoller_subtask.py @@ -34,7 +34,7 @@ class PlotHovmollerSubtask(AnalysisTask): Attributes ---------- - controlconfig : mpas_tools.config.MpasConfigParser + controlconfig : tranche.Tranche Configuration options for a control run (if any) regionName : str @@ -148,7 +148,7 @@ def __init__(self, parentTask, regionName, inFileName, outFileLabel, subtaskName : str, optional The name of the subtask (``plotHovmoller`` by default) - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) regionMaskFile : str, optional @@ -259,19 +259,15 @@ def run_task(self): ds = ds.set_xindex('regionNames') ds = ds.sel(regionNames=self.regionName) - # Note: restart file, not a mesh file because we need refBottomDepth, + # Note: mesh file, not a mesh file because we need refBottomDepth, # not in a mesh file - try: - restartFile = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least one ' - 'restart file for plotting time series vs. 
depth') + meshFilename = self.get_mesh_filename() # Define/read in general variables self.logger.info(' Read in depth...') - with xr.open_dataset(restartFile) as dsRestart: + with xr.open_dataset(meshFilename) as dsMesh: # reference depth [m] - depths = dsRestart.refBottomDepth.values + depths = dsMesh.refBottomDepth.values z = np.zeros(depths.shape) z[0] = -0.5 * depths[0] z[1:] = -0.5 * (depths[0:-1] + depths[1:]) diff --git a/mpas_analysis/ocean/plot_transect_subtask.py b/mpas_analysis/ocean/plot_transect_subtask.py index dc3168f41..1bb067b5b 100644 --- a/mpas_analysis/ocean/plot_transect_subtask.py +++ b/mpas_analysis/ocean/plot_transect_subtask.py @@ -23,8 +23,6 @@ from geometric_features import FeatureCollection -from mpas_tools.ocean.transects import get_outline_segments - from mpas_analysis.shared.plot import plot_vertical_section_comparison, \ savefig, add_inset @@ -58,7 +56,7 @@ class PlotTransectSubtask(AnalysisTask): plotObs : bool, optional Whether to plot against observations. - controlconfig : mpas_tools.config.MpasConfigParser + controlconfig : tranche.Tranche Configuration options for a control run (if any), ignored if ``plotObs == True`` @@ -135,7 +133,7 @@ def __init__(self, parentTask, season, transectName, fieldName, plotObs : bool, optional Whether to plot against observations. 
- controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any), ignored if ``plotObs == True`` @@ -397,18 +395,12 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): else: x = 1e-3*remappedModelClimatology.dNode - z = None + z = remappedModelClimatology.zTransectNode lon = remappedModelClimatology.lonNode lat = remappedModelClimatology.latNode remappedModelClimatology['dNode'] = x - # flatten the x, lon and lat arrays because this is what - # vertical_section is expecting - x = xr.DataArray(data=x.values.ravel(), dims=('nx',)) - lon = xr.DataArray(data=lon.values.ravel(), dims=('nx',)) - lat = xr.DataArray(data=lat.values.ravel(), dims=('nx',)) - # This will do strange things at the antemeridian but there's little # we can do about that. lon_pm180 = numpy.mod(lon + 180., 360.) - 180. @@ -437,17 +429,13 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): modelOutput = remappedModelClimatology[self.mpasFieldName] - if remap: - triangulation_args = None - else: - triangulation_args = self._get_ds_triangulation( - remappedModelClimatology) - if remappedRefClimatology is None: refOutput = None bias = None else: refOutput = remappedRefClimatology[self.refFieldName] + # make sure the dimension order is the same + refOutput = refOutput.transpose(*modelOutput.dims) bias = modelOutput - refOutput filePrefix = self.filePrefix @@ -578,12 +566,11 @@ def _plot_transect(self, remappedModelClimatology, remappedRefClimatology): configSectionName, xCoords=xs, zCoord=z, - triangulation_args=triangulation_args, colorbarLabel=self.unitsLabel, xlabels=xLabels, ylabel=yLabel, title=title, - modelTitle='{}'.format(mainRunName), + modelTitle=mainRunName, refTitle=self.refTitleLabel, diffTitle=self.diffTitleLabel, numUpperTicks=numUpperTicks, @@ -694,7 +681,7 @@ def _lat_greater_extent(self, lat, lon): maxes = [] last_idx = 0 - while(len(lon_r) > 
0 and len(lon_l) > 0): + while len(lon_r) > 0 and len(lon_l) > 0: if lon_r[0] < lon_l[0]: mins.append(numpy.min(lon[last_idx:lon_r[0]])) last_idx = lon_r[0] @@ -741,7 +728,8 @@ def _strictly_monotonic(self, coord): # Greg Streletz, Xylar Asay-Davis coord_diff = numpy.diff(coord.values) - coord_diff = numpy.where(coord_diff > 180, coord_diff - 360, coord_diff) + coord_diff = numpy.where(coord_diff > 180, coord_diff - 360, + coord_diff) coord_diff = numpy.where(coord_diff < -180, coord_diff + 360, coord_diff) return numpy.all(coord_diff > 0) or numpy.all(coord_diff < 0) @@ -838,24 +826,6 @@ def _lat_fewest_direction_changes(self, lat, lon): else: return False - def _get_ds_triangulation(self, dsTransectTriangles): - """get matplotlib Triangulation from triangulation dataset""" - - nTransectTriangles = dsTransectTriangles.sizes['nTransectTriangles'] - dNode = dsTransectTriangles.dNode.isel( - nSegments=dsTransectTriangles.segmentIndices, - nHorizBounds=dsTransectTriangles.nodeHorizBoundsIndices) - x = dNode.values.ravel() - - zTransectNode = dsTransectTriangles.zTransectNode - y = zTransectNode.values.ravel() - - tris = numpy.arange(3 * nTransectTriangles).reshape( - (nTransectTriangles, 3)) - triangulation_args = dict(x=x, y=y, triangles=tris) - - return triangulation_args - @staticmethod def _get_contour_colormap(): # https://stackoverflow.com/a/18926541/7728169 diff --git a/mpas_analysis/ocean/regional_ts_diagrams.py b/mpas_analysis/ocean/regional_ts_diagrams.py index 10f516444..151cd55a6 100644 --- a/mpas_analysis/ocean/regional_ts_diagrams.py +++ b/mpas_analysis/ocean/regional_ts_diagrams.py @@ -69,7 +69,7 @@ def __init__(self, config, mpasClimatologyTask, regionMasksTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` @@ -78,7 +78,7 @@ def __init__(self, config, mpasClimatologyTask, regionMasksTask, regionMasksTask : ``ComputeRegionMasks`` A task 
for computing region masks - controlConfig : mpas_tools.config.MpasConfigParser, optional + controlConfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -471,7 +471,7 @@ class ComputeRegionTSSubtask(AnalysisTask): sectionName : str The section of the config file to get options from - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche The configuration options for the control run (if any) mpasClimatologyTask : ``MpasClimatologyTask`` @@ -510,7 +510,7 @@ def __init__(self, parentTask, regionGroup, regionName, controlConfig, regionName : str Name of the region to plot - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) sectionName : str @@ -600,18 +600,14 @@ def _write_mpas_t_s(self, config): cellsChunk = 32768 chunk = {'nCells': cellsChunk} - try: - restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least one' - ' restart file to plot T-S diagrams') - dsRestart = xarray.open_dataset(restartFileName) - dsRestart = dsRestart.isel(Time=0) - if 'landIceMask' in dsRestart: - landIceMask = dsRestart.landIceMask + meshFilename = self.get_mesh_filename() + dsMesh = xarray.open_dataset(meshFilename) + dsMesh = dsMesh.isel(Time=0) + if 'landIceMask' in dsMesh: + landIceMask = dsMesh.landIceMask else: landIceMask = None - dsRestart = dsRestart.chunk(chunk) + dsMesh = dsMesh.chunk(chunk) regionMaskFileName = self.mpasMasksSubtask.maskFileName @@ -653,11 +649,11 @@ def _write_mpas_t_s(self, config): 'timeMonthly_avg_layerThickness'] ds = ds[variableList] - ds['zMid'] = compute_zmid(dsRestart.bottomDepth, - dsRestart.maxLevelCell-1, - dsRestart.layerThickness) + ds['zMid'] = compute_zmid(dsMesh.bottomDepth, + dsMesh.maxLevelCell-1, + dsMesh.layerThickness) - ds['volume'] = (dsRestart.areaCell * + ds['volume'] = 
(dsMesh.areaCell * ds['timeMonthly_avg_layerThickness']) ds.load() @@ -799,7 +795,7 @@ class PlotRegionTSDiagramSubtask(AnalysisTask): sectionName : str The section of the config file to get options from - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche The configuration options for the control run (if any) mpasClimatologyTask : ``MpasClimatologyTask`` @@ -838,7 +834,7 @@ def __init__(self, parentTask, regionGroup, regionName, controlConfig, regionName : str Name of the region to plot - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) sectionName : str @@ -1014,8 +1010,8 @@ def run_task(self): plotFields.append({'S': obsS, 'T': obsT, 'z': obsZ, 'vol': obsVol, 'title': obsName}) - Tbins = config.getexpression(sectionName, 'Tbins', use_numpyfunc=True) - Sbins = config.getexpression(sectionName, 'Sbins', use_numpyfunc=True) + Tbins = config.getnumpy(sectionName, 'Tbins') + Sbins = config.getnumpy(sectionName, 'Sbins') normType = config.get(sectionName, 'normType') @@ -1210,10 +1206,8 @@ def _plot_volumetric_panel(self, T, S, volume): config = self.config sectionName = self.sectionName cmap = config.get(sectionName, 'colorMap') - Tbins = config.getexpression(sectionName, 'Tbins', - use_numpyfunc=True) - Sbins = config.getexpression(sectionName, 'Sbins', - use_numpyfunc=True) + Tbins = config.getnumpy(sectionName, 'Tbins') + Sbins = config.getnumpy(sectionName, 'Sbins') hist, _, _, panel = plt.hist2d(S, T, bins=[Sbins, Tbins], weights=volume, cmap=cmap, zorder=1, diff --git a/mpas_analysis/ocean/remap_depth_slices_subtask.py b/mpas_analysis/ocean/remap_depth_slices_subtask.py index a3395916d..7c60c89e4 100644 --- a/mpas_analysis/ocean/remap_depth_slices_subtask.py +++ b/mpas_analysis/ocean/remap_depth_slices_subtask.py @@ -13,7 +13,10 @@ from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask -from 
mpas_analysis.ocean.utility import compute_zmid +from mpas_analysis.ocean.utility import ( + compute_zinterface, + compute_zmid +) class RemapDepthSlicesSubtask(RemapMpasClimatologySubtask): @@ -27,11 +30,9 @@ class RemapDepthSlicesSubtask(RemapMpasClimatologySubtask): A list of depths at which the climatology will be sliced in the vertical. - maxLevelCell : xarray.DataArray - The vertical index of the bottom cell in MPAS results - - verticalIndices : xarray.DataArray - The vertical indices of slice to be plotted + dsSlice : xarray.Dataset + A dataset containing information needed to index variables at the + designated depths """ # Authors # ------- @@ -39,7 +40,7 @@ class RemapDepthSlicesSubtask(RemapMpasClimatologySubtask): def __init__(self, mpasClimatologyTask, parentTask, climatologyName, variableList, seasons, depths, comparisonGridNames=['latlon'], - iselValues=None): + iselValues=None, subtaskName='remapDepthSlices'): """ Construct the analysis task and adds it as a subtask of the @@ -78,18 +79,23 @@ def __init__(self, mpasClimatologyTask, parentTask, climatologyName, iselValues : dict, optional A dictionary of dimensions and indices (or ``None``) used to extract a slice of the MPAS field(s). 
+ + subtaskName : str, optional + The name of the subtask """ # Authors # ------- # Xylar Asay-Davis self.depths = depths + self.dsSlice = xr.Dataset() # call the constructor from the base class # (RemapMpasClimatologySubtask) - super(RemapDepthSlicesSubtask, self).__init__( + super().__init__( mpasClimatologyTask, parentTask, climatologyName, variableList, - seasons, comparisonGridNames, iselValues) + seasons, comparisonGridNames, iselValues, + subtaskName=subtaskName) def run_task(self): """ @@ -105,71 +111,98 @@ def run_task(self): # ------- # Xylar Asay-Davis - # first, load the land-ice mask from the restart file - ds = xr.open_dataset(self.restartFileName) + # first, load the land-ice mask from the mesh file + ds = xr.open_dataset(self.meshFilename) ds = ds[['maxLevelCell', 'bottomDepth', 'layerThickness']] ds = ds.isel(Time=0) - self.maxLevelCell = ds.maxLevelCell - 1 - depthNames = [str(depth) for depth in self.depths] - zMid = compute_zmid(ds.bottomDepth, ds.maxLevelCell-1, - ds.layerThickness) - ocean_mask = (ds.maxLevelCell > 0) + bottomDepth = ds.bottomDepth + layerThickness = ds.layerThickness + maxLevelCell = ds.maxLevelCell - 1 + self.dsSlice['maxLevelCell'] = maxLevelCell + + zMid = compute_zmid(bottomDepth, maxLevelCell, layerThickness) - nVertLevels = zMid.shape[1] + zInterface = compute_zinterface( + bottomDepth, maxLevelCell, layerThickness) + + horizontalMask = maxLevelCell >= 0 + + nVertLevels = ds.sizes['nVertLevels'] zMid.coords['verticalIndex'] = \ ('nVertLevels', np.arange(nVertLevels)) - zTop = zMid.isel(nVertLevels=0) + nVertLevelsP1 = zInterface.sizes['nVertLevelsP1'] + zInterface.coords['verticalIndex'] = \ + ('nVertLevelsP1', + np.arange(nVertLevelsP1)) + + zLevelTop = zMid.isel(nVertLevels=0) # Each vertical layer has at most one non-NaN value so the "sum" # over the vertical is used to collapse the array in the vertical # dimension - zBot = zMid.where(zMid.verticalIndex == self.maxLevelCell).sum( + zLevelBot = 
zMid.where(zMid.verticalIndex == maxLevelCell).sum( dim='nVertLevels') - verticalIndices = np.zeros((len(self.depths), ds.sizes['nCells']), int) + zInterfaceTop = zInterface.isel(nVertLevelsP1=0) + zInterfaceBot = zInterface.where( + zInterface.verticalIndex == maxLevelCell + 1).sum( + dim='nVertLevelsP1') - mask = np.zeros(verticalIndices.shape, bool) + levelIndices = np.zeros((len(self.depths), ds.sizes['nCells']), int) + levelMask = np.zeros(levelIndices.shape, bool) + interfaceIndices = np.zeros(levelIndices.shape, int) + interfaceMask = np.zeros(levelIndices.shape, bool) for depthIndex, depth in enumerate(self.depths): depth = self.depths[depthIndex] if depth == 'top': - # switch to zero-based index - verticalIndices[depthIndex, :] = 0 - mask[depthIndex, :] = self.maxLevelCell.values >= 0 + levelIndices[depthIndex, :] = 0 + levelMask[depthIndex, :] = horizontalMask.values + interfaceIndices[depthIndex, :] = 0 + interfaceMask[depthIndex, :] = horizontalMask.values elif depth == 'bot': # switch to zero-based index - verticalIndices[depthIndex, :] = self.maxLevelCell.values - mask[depthIndex, :] = self.maxLevelCell.values >= 0 + levelIndices[depthIndex, :] = maxLevelCell.values + levelMask[depthIndex, :] = horizontalMask.values + interfaceIndices[depthIndex, :] = maxLevelCell.values + 1 + interfaceMask[depthIndex, :] = horizontalMask.values else: - - diff = np.abs(zMid - depth).where(ocean_mask, drop=True) - verticalIndex = diff.argmin(dim='nVertLevels') - - verticalIndices[depthIndex, ocean_mask.values] = \ - verticalIndex.values - mask[depthIndex, :] = np.logical_and(depth <= zTop, - depth >= zBot).values - - self.verticalIndices = \ - xr.DataArray.from_dict({'dims': ('depthSlice', 'nCells'), - 'coords': {'depthSlice': - {'dims': ('depthSlice',), - 'data': depthNames}}, - 'data': verticalIndices}) - self.verticalIndexMask = \ - xr.DataArray.from_dict({'dims': ('depthSlice', 'nCells'), - 'coords': {'depthSlice': - {'dims': ('depthSlice',), - 'data': 
depthNames}}, - 'data': mask}) + levelDiff = np.abs(zMid - depth).where(horizontalMask, + drop=True) + levelIndex = levelDiff.argmin(dim='nVertLevels') + + levelIndices[depthIndex, horizontalMask.values] = \ + levelIndex.values + levelMask[depthIndex, :] = np.logical_and( + depth <= zLevelTop, depth >= zLevelBot).values + + interfaceDiff = np.abs(zInterface - depth).where( + horizontalMask, drop=True) + interfaceIndex = interfaceDiff.argmin(dim='nVertLevelsP1') + + interfaceIndices[depthIndex, horizontalMask.values] = \ + interfaceIndex.values + interfaceMask[depthIndex, :] = np.logical_and( + depth <= zInterfaceTop, depth >= zInterfaceBot).values + + self.dsSlice.coords['depthSlice'] = ('depthSlice', depthNames) + + self.dsSlice['levelIndices'] = (('depthSlice', 'nCells'), + levelIndices) + self.dsSlice['levelIndexMask'] = (('depthSlice', 'nCells'), + levelMask) + self.dsSlice['interfaceIndices'] = (('depthSlice', 'nCells'), + interfaceIndices) + self.dsSlice['interfaceIndexMask'] = (('depthSlice', 'nCells'), + interfaceMask) # then, call run from the base class (RemapMpasClimatologySubtask), # which will perform the main function of the task - super(RemapDepthSlicesSubtask, self).run_task() + super().run_task() def customize_masked_climatology(self, climatology, season): """ @@ -197,29 +230,48 @@ def customize_masked_climatology(self, climatology, season): if self.depths is None: return climatology - climatology.coords['verticalIndex'] = \ - ('nVertLevels', - np.arange(climatology.sizes['nVertLevels'])) + if 'nVertLevels' in climatology.dims: + climatology.coords['levelIndex'] = \ + ('nVertLevels', + np.arange(climatology.sizes['nVertLevels'])) + if 'nVertLevelsP1' in climatology.dims: + climatology.coords['interfaceIndex'] = \ + ('nVertLevelsP1', + np.arange(climatology.sizes['nVertLevelsP1'])) depthNames = [str(depth) for depth in self.depths] climatology.coords['depthSlice'] = ('depthSlice', depthNames) - for variableName in self.variableList: - if 
'nVertLevels' not in climatology[variableName].dims: - continue - - # mask only the values with the right vertical index - da = climatology[variableName].where( - climatology.verticalIndex == self.verticalIndices) - - # Each vertical layer has at most one non-NaN value so the "sum" - # over the vertical is used to collapse the array in the vertical - # dimension - climatology[variableName] = \ - da.sum(dim='nVertLevels').where(self.verticalIndexMask) - - climatology = climatology.drop_vars('verticalIndex') + levelIndices = self.dsSlice.levelIndices + levelIndexMask = self.dsSlice.levelIndexMask + interfaceIndices = self.dsSlice.interfaceIndices + interfaceIndexMask = self.dsSlice.interfaceIndexMask + + # iterate over all variables since some new ones may have been + # added by a subclass + for variableName in climatology.data_vars: + if 'nVertLevels' in climatology[variableName].dims: + # mask only the values with the right vertical index + da = climatology[variableName].where( + climatology.levelIndex == levelIndices) + + # Each vertical layer has at most one non-NaN value so the + # "sum" over the vertical is used to collapse the array in the + # vertical dimension + climatology[variableName] = \ + da.sum(dim='nVertLevels').where(levelIndexMask) + elif 'nVertLevelsP1' in climatology[variableName].dims: + da = climatology[variableName].where( + climatology.interfaceIndex == interfaceIndices) + + climatology[variableName] = \ + da.sum(dim='nVertLevelsP1').where(interfaceIndexMask) + + if 'levelIndex' in climatology.coords: + climatology = climatology.drop_vars('levelIndex') + if 'interfaceIndex' in climatology.coords: + climatology = climatology.drop_vars('interfaceIndex') climatology = climatology.transpose('depthSlice', 'nCells') diff --git a/mpas_analysis/ocean/remap_sose_climatology.py b/mpas_analysis/ocean/remap_sose_climatology.py index fd188c85f..ad09fa7d4 100644 --- a/mpas_analysis/ocean/remap_sose_climatology.py +++ 
b/mpas_analysis/ocean/remap_sose_climatology.py @@ -114,7 +114,7 @@ def get_observation_descriptor(self, fileName): # stereographic coordinates projection = get_pyproj_projection(comparison_grid_name='antarctic') obsDescriptor = ProjectionGridDescriptor.read( - projection, fileName=fileName, xVarName='x', yVarName='y') + projection, filename=fileName, x_var_name='x', y_var_name='y') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/ocean/sose_transects.py b/mpas_analysis/ocean/sose_transects.py index 3fb2e67a4..d9f2bee57 100644 --- a/mpas_analysis/ocean/sose_transects.py +++ b/mpas_analysis/ocean/sose_transects.py @@ -45,14 +45,14 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted as a transect - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -79,13 +79,13 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): if verticalComparisonGridName in ['mpas', 'obs']: verticalComparisonGrid = None else: - verticalComparisonGrid = config.getexpression( - sectionName, 'verticalComparisonGrid', use_numpyfunc=True) + verticalComparisonGrid = config.getnumpy( + sectionName, 'verticalComparisonGrid' + ) verticalBounds = config.getexpression(sectionName, 'verticalBounds') - longitudes = sorted(config.getexpression(sectionName, 'longitudes', - use_numpyfunc=True)) + longitudes = sorted(config.getnumpy(sectionName, 'longitudes')) fields = \ [{'prefix': 'temperature', @@ -236,7 +236,7 @@ def __init__(self, config, horizontalResolution, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options 
horizontalResolution : str @@ -295,8 +295,7 @@ def combine_observations(self): config = self.config - longitudes = sorted(config.getexpression('soseTransects', 'longitudes', - use_numpyfunc=True)) + longitudes = sorted(config.getnumpy('soseTransects', 'longitudes')) observationsDirectory = build_obs_path( config, 'ocean', 'soseSubdirectory') diff --git a/mpas_analysis/ocean/streamfunction_moc.py b/mpas_analysis/ocean/streamfunction_moc.py index ca69a8918..26ef1a32e 100644 --- a/mpas_analysis/ocean/streamfunction_moc.py +++ b/mpas_analysis/ocean/streamfunction_moc.py @@ -58,13 +58,13 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -362,14 +362,10 @@ def _compute_moc_climo_analysismember(self): regionNames.append('Global') # Read in depth and bin latitudes - try: - restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least ' - 'one for MHT calcuation') + meshFilename = self.get_mesh_filename() - with xr.open_dataset(restartFileName) as dsRestart: - refBottomDepth = dsRestart.refBottomDepth.values + with xr.open_dataset(meshFilename) as dsMesh: + refBottomDepth = dsMesh.refBottomDepth.values nVertLevels = len(refBottomDepth) refLayerThickness = np.zeros(nVertLevels) @@ -490,7 +486,7 @@ def _compute_moc_climo_postprocess(self): dvEdge, areaCell, refBottomDepth, latCell, nVertLevels, \ refTopDepth, refLayerThickness, cellsOnEdge = \ - _load_mesh(self.runStreams) + _load_mesh(self.get_mesh_filename()) regionNames = config.getexpression(self.sectionName, 
'regionNames') @@ -643,7 +639,7 @@ def __init__(self, parentTask, controlConfig): parentTask : ``StreamfunctionMOC`` The main task of which this is a subtask - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -1082,13 +1078,11 @@ def _compute_moc_time_series_analysismember(self): sizes = dsLocal.sizes moc = np.zeros((len(inputFiles), sizes['nVertLevels']+1, len(binBoundaryMocStreamfunction))) - try: - restartFile = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at ' - 'least one restart file for MOC calculation') - with xr.open_dataset(restartFile) as dsRestart: - refBottomDepth = dsRestart.refBottomDepth.values + + meshFilename = self.get_mesh_filename() + + with xr.open_dataset(meshFilename) as dsMesh: + refBottomDepth = dsMesh.refBottomDepth.values nVertLevels = len(refBottomDepth) refTopDepth = np.zeros(nVertLevels + 1) refTopDepth[1:nVertLevels + 1] = refBottomDepth[0:nVertLevels] @@ -1160,7 +1154,7 @@ def _compute_moc_time_series_postprocess(self): dvEdge, areaCell, refBottomDepth, latCell, nVertLevels, \ refTopDepth, refLayerThickness, cellsOnEdge = \ - _load_mesh(self.runStreams) + _load_mesh(self.get_mesh_filename()) mpasMeshName = config.get('input', 'mpasMeshName') @@ -1350,7 +1344,7 @@ def __init__(self, parentTask, startYears, endYears): parentTask : ``StreamfunctionMOC`` The main task of which this is a subtask - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -1422,7 +1416,7 @@ def __init__(self, parentTask, controlConfig): parentTask : ``StreamfunctionMOC`` The main task of which this is a subtask - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if 
any) """ # Authors @@ -1570,14 +1564,9 @@ def _load_moc(self, config): return dsMOCTimeSeries -def _load_mesh(runStreams): +def _load_mesh(meshFilename): # Load mesh related variables - try: - restartFile = runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least one ' - 'restart file for MOC calculation') - ncFile = netCDF4.Dataset(restartFile, mode='r') + ncFile = netCDF4.Dataset(meshFilename, mode='r') dvEdge = ncFile.variables['dvEdge'][:] areaCell = ncFile.variables['areaCell'][:] refBottomDepth = ncFile.variables['refBottomDepth'][:] diff --git a/mpas_analysis/ocean/time_series_antarctic_melt.py b/mpas_analysis/ocean/time_series_antarctic_melt.py index 911db61b6..76da6522a 100644 --- a/mpas_analysis/ocean/time_series_antarctic_melt.py +++ b/mpas_analysis/ocean/time_series_antarctic_melt.py @@ -50,7 +50,7 @@ def __init__(self, config, mpasTimeSeriesTask, regionMasksTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasTimeSeriesTask : ``MpasTimeSeriesTask`` @@ -59,7 +59,7 @@ def __init__(self, config, mpasTimeSeriesTask, regionMasksTask, regionMasksTask : ``ComputeRegionMasks`` A task for computing region masks - controlConfig : mpas_tools.config.MpasConfigParser, optional + controlConfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -175,7 +175,6 @@ def __init__(self, parentTask, startYear, endYear, mpasTimeSeriesTask, self.run_after(masksSubtask) self.iceShelvesToPlot = iceShelvesToPlot - self.restartFileName = None self.startYear = startYear self.endYear = endYear self.startDate = f'{self.startYear:04d}-01-01_00:00:00' @@ -217,13 +216,6 @@ def setup_and_check(self): ' Otherwise, no melt rates are available \n' ' for plotting.') - # Load mesh related variables - try: - self.restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O 
restart file found: need at least one ' - 'restart file for Antarctic melt calculations') - totalFluxVar = 'timeMonthly_avg_landIceFreshwaterFluxTotal' landIceFluxVar = 'timeMonthly_avg_landIceFreshwaterFlux' if totalFluxVar in self.mpasTimeSeriesTask.allVariables: @@ -284,12 +276,11 @@ def run_task(self): f'Deleting it.') os.remove(outFileName) - restartFileName = \ - mpasTimeSeriesTask.runStreams.readpath('restart')[0] + meshFilename = self.get_mesh_filename() - dsRestart = xarray.open_dataset(restartFileName) - landIceFraction = dsRestart.landIceFraction.isel(Time=0) - areaCell = dsRestart.areaCell + dsMesh = xarray.open_dataset(meshFilename) + landIceFraction = dsMesh.landIceFraction.isel(Time=0) + areaCell = dsMesh.areaCell regionMaskFileName = self.masksSubtask.maskFileName @@ -351,7 +342,7 @@ def run_task(self): dsOut = xarray.concat(objs=datasets, dim='Time') dsOut['regionNames'] = dsRegionMask.regionNames - dsOut.integratedMeltFlux.attrs['units'] = 'GT a$^{-1}$' + dsOut.integratedMeltFlux.attrs['units'] = 'Gt a$^{-1}$' dsOut.integratedMeltFlux.attrs['description'] = \ 'Integrated melt flux summed over each ice shelf or region' dsOut.meltRates.attrs['units'] = 'm a$^{-1}$' @@ -440,7 +431,7 @@ class PlotMeltSubtask(AnalysisTask): regionIndex : int The index into the dimension ``nRegions`` of the ice shelf to plot - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche The configuration options for the control run (if any) """ @@ -465,7 +456,7 @@ def __init__(self, parentTask, iceShelf, regionIndex, controlConfig): regionIndex : int The index into the dimension ``nRegions`` of the ice shelf to plot - controlConfig : mpas_tools.config.MpasConfigParser, optional + controlConfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -671,7 +662,7 @@ def run_task(self): suffix = self.iceShelf.replace(' ', '_') xLabel = 'Time (yr)' - yLabel = 'Melt Flux (GT/yr)' + yLabel = 'Melt Flux (Gt/yr)' 
timeSeries = integratedMeltFlux.isel(nRegions=self.regionIndex) @@ -732,7 +723,7 @@ def run_task(self): # and cartopy doesn't play too well with tight_layout anyway plt.tight_layout() - add_inset(fig, fc, width=2.0, height=2.0) + add_inset(fig, fc, width=1.0, height=1.0, lowerleft=[0.0, 0.0], xbuffer=0.01, ybuffer=0.01) savefig(outFileName, config) @@ -797,7 +788,7 @@ def run_task(self): # and cartopy doesn't play too well with tight_layout anyway plt.tight_layout() - add_inset(fig, fc, width=2.0, height=2.0) + add_inset(fig, fc, width=1.0, height=1.0, lowerleft=[0.0, 0.0], xbuffer=0.01, ybuffer=0.01) savefig(outFileName, config) diff --git a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index 95572eed7..056df5d22 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -44,13 +44,13 @@ def __init__(self, config, regionMasksTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options regionMasksTask : ``ComputeRegionMasks`` A task for computing region masks - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -106,11 +106,17 @@ def __init__(self, config, regionMasksTask, controlConfig=None): 'tDim': 'time', 'legend': 'WOA23 1991-2020 ANN mean'}} + anyAnomalies = False + for regionGroup in regionGroups: sectionSuffix = regionGroup[0].upper() + \ regionGroup[1:].replace(' ', '') sectionName = 'timeSeries{}'.format(sectionSuffix) + anomalyVars = config.getexpression(sectionName, 'anomalies') + if len(anomalyVars) > 0: + anyAnomalies = True + regionNames = config.getexpression(sectionName, 'regionNames') if len(regionNames) == 0: # no regions in this group were requested @@ -192,6 +198,9 @@ def __init__(self, config, regionMasksTask, controlConfig=None): 
plotRegionSubtask.run_after(combineSubtask) self.add_subtask(plotRegionSubtask) + if anyAnomalies: + self.tags.append('anomaly') + class ComputeRegionDepthMasksSubtask(AnalysisTask): """ @@ -286,11 +295,7 @@ def run_task(self): return # Load mesh related variables - try: - restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least one ' - 'restart file for ocean region time series') + meshFilename = self.get_mesh_filename() if config.has_option(sectionName, 'zmin'): config_zmin = config.getfloat(sectionName, 'zmin') @@ -302,13 +307,13 @@ def run_task(self): else: config_zmax = None - dsRestart = xarray.open_dataset(restartFileName).isel(Time=0) - zMid = compute_zmid(dsRestart.bottomDepth, dsRestart.maxLevelCell-1, - dsRestart.layerThickness) - areaCell = dsRestart.areaCell - if 'landIceMask' in dsRestart: + dsMesh = xarray.open_dataset(meshFilename).isel(Time=0) + zMid = compute_zmid(dsMesh.bottomDepth, dsMesh.maxLevelCell-1, + dsMesh.layerThickness) + areaCell = dsMesh.areaCell + if 'landIceMask' in dsMesh: # only the region outside of ice-shelf cavities - openOceanMask = dsRestart.landIceMask == 0 + openOceanMask = dsMesh.landIceMask == 0 else: openOceanMask = None @@ -1041,7 +1046,7 @@ class PlotRegionTimeSeriesSubtask(AnalysisTask): sectionName : str The section of the config file to get options from - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche The configuration options for the control run (if any) """ @@ -1071,7 +1076,7 @@ def __init__(self, parentTask, regionGroup, regionName, regionIndex, regionIndex : int The index into the dimension ``nRegions`` of the region to plot - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) sectionName : str @@ -1345,7 +1350,7 @@ def run_task(self): # and cartopy doesn't play too well with tight_layout 
anyway plt.tight_layout() - add_inset(fig, fc, width=2.0, height=2.0) + add_inset(fig, fc, width=1.0, height=1.0, lowerleft=[0.0, 0.0], xbuffer=0.01, ybuffer=0.01) savefig(outFileName, config, tight=False) diff --git a/mpas_analysis/ocean/time_series_ohc_anomaly.py b/mpas_analysis/ocean/time_series_ohc_anomaly.py index 7a8de35f8..b3dbf21d0 100644 --- a/mpas_analysis/ocean/time_series_ohc_anomaly.py +++ b/mpas_analysis/ocean/time_series_ohc_anomaly.py @@ -43,13 +43,13 @@ def __init__(self, config, mpasTimeSeriesTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasTimeSeriesTask : ``MpasTimeSeriesTask`` The task that extracts the time series from MPAS monthly output - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -152,20 +152,15 @@ def _compute_ohc(self, ds): ds.ohc.attrs['units'] = '$10^{22}$ J' ds.ohc.attrs['description'] = 'Ocean heat content in each region' - # Note: restart file, not a mesh file because we need refBottomDepth, - # not in a mesh file - try: - restartFile = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least one ' - 'restart file for OHC calculation') + meshFile = self.get_mesh_filename() # Define/read in general variables - with xr.open_dataset(restartFile) as dsRestart: + with xr.open_dataset(meshFile) as dsMesh: # reference depth [m] # add depths as a coordinate to the data set - ds.coords['depth'] = (('nVertLevels',), - dsRestart.refBottomDepth.values) + ds.coords['depth'] = ( + ('nVertLevels',), dsMesh.refBottomDepth.values + ) return ds diff --git a/mpas_analysis/ocean/time_series_salinity_anomaly.py b/mpas_analysis/ocean/time_series_salinity_anomaly.py index 52290e897..e0dfd2376 100644 --- a/mpas_analysis/ocean/time_series_salinity_anomaly.py +++ 
b/mpas_analysis/ocean/time_series_salinity_anomaly.py @@ -34,7 +34,7 @@ def __init__(self, config, mpasTimeSeriesTask): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options mpasTimeSeriesTask : ``MpasTimeSeriesTask`` diff --git a/mpas_analysis/ocean/time_series_ssh_anomaly.py b/mpas_analysis/ocean/time_series_ssh_anomaly.py index 9e981e2cd..0043c06f2 100644 --- a/mpas_analysis/ocean/time_series_ssh_anomaly.py +++ b/mpas_analysis/ocean/time_series_ssh_anomaly.py @@ -36,7 +36,7 @@ class TimeSeriesSSHAnomaly(AnalysisTask): timeSeriesFileName : str The name of the file where the ssh anomaly is stored - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche Configuration options for a control run (if one is provided) filePrefix : str @@ -53,13 +53,13 @@ def __init__(self, config, mpasTimeSeriesTask, controlConfig): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasTimeSeriesTask : mpas_analysis.shared.time_series.MpasTimeSeriesTask The task that extracts the time series from MPAS monthly output - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche Configuration options for a control run (if any) """ # Authors diff --git a/mpas_analysis/ocean/time_series_sst.py b/mpas_analysis/ocean/time_series_sst.py index d1fb3b649..7d95b8bec 100644 --- a/mpas_analysis/ocean/time_series_sst.py +++ b/mpas_analysis/ocean/time_series_sst.py @@ -40,7 +40,7 @@ class TimeSeriesSST(AnalysisTask): mpasTimeSeriesTask : ``MpasTimeSeriesTask`` The task that extracts the time series from MPAS monthly output - controlconfig : mpas_tools.config.MpasConfigParser + controlconfig : tranche.Tranche Configuration options for a control run (if any) """ # Authors @@ -54,13 +54,13 @@ def __init__(self, config, mpasTimeSeriesTask, controlConfig=None): Parameters ---------- - config : 
mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasTimeSeriesTask : ``MpasTimeSeriesTask`` The task that extracts the time series from MPAS monthly output - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors diff --git a/mpas_analysis/ocean/time_series_temperature_anomaly.py b/mpas_analysis/ocean/time_series_temperature_anomaly.py index 6dbd43f65..9469fcf77 100644 --- a/mpas_analysis/ocean/time_series_temperature_anomaly.py +++ b/mpas_analysis/ocean/time_series_temperature_anomaly.py @@ -34,7 +34,7 @@ def __init__(self, config, mpasTimeSeriesTask): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options mpasTimeSeriesTask : ``MpasTimeSeriesTask`` diff --git a/mpas_analysis/ocean/time_series_transport.py b/mpas_analysis/ocean/time_series_transport.py index 0d00a6a64..e23996fd0 100644 --- a/mpas_analysis/ocean/time_series_transport.py +++ b/mpas_analysis/ocean/time_series_transport.py @@ -49,10 +49,10 @@ def __init__(self, config, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -124,7 +124,7 @@ class ComputeTransportSubtask(AnalysisTask): transectsToPlot : list of str A list of transects to plot - + groupSuffix : str standard transects vs Arctic transects """ @@ -159,7 +159,7 @@ def __init__(self, parentTask, startYear, endYear, # Authors # ------- # Xylar Asay-Davis - subtaskName = f'compute{groupSuffix}_{startYear:04d}-{endYear:04d}' + subtaskName = f'compute{groupSuffix}_{startYear:04d}-{endYear:04d}' # first, call the constructor from the base class (AnalysisTask) super().__init__( 
config=parentTask.config, @@ -167,7 +167,7 @@ def __init__(self, parentTask, startYear, endYear, componentName=parentTask.componentName, tags=parentTask.tags, subtaskName=subtaskName) - + self.subprocessCount = self.config.getint(f'timeSeries{groupSuffix}', 'subprocessCount') self.startYear = startYear @@ -177,7 +177,7 @@ def __init__(self, parentTask, startYear, endYear, self.run_after(masksSubtask) self.transectsToPlot = transectsToPlot - self.restartFileName = None + self.meshFilename = None self.groupSuffix = groupSuffix def setup_and_check(self): @@ -208,11 +208,7 @@ def setup_and_check(self): raiseException=True) # Load mesh related variables - try: - self.restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least one ' - 'restart file for transport calculations') + self.meshFilename = self.get_mesh_filename() def run_task(self): """ @@ -293,7 +289,7 @@ def run_task(self): # figure out the indices of the transects to plot maskTransectNames = decode_strings(dsTransectMask.transectNames) - dsMesh = xarray.open_dataset(self.restartFileName) + dsMesh = xarray.open_dataset(self.meshFilename) dsMesh = dsMesh[['dvEdge', 'cellsOnEdge']] dsMesh.load() dvEdge = dsMesh.dvEdge @@ -417,7 +413,7 @@ def __init__(self, parentTask, startYears, endYears, groupSuffix): # Authors # ------- # Xylar Asay-Davis - + # first, call the constructor from the base class (AnalysisTask) super(CombineTransportSubtask, self).__init__( config=parentTask.config, @@ -468,13 +464,13 @@ class PlotTransportSubtask(AnalysisTask): transectIndex : int The index into the dimension ``nTransects`` of the transect to plot - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche The configuration options for the control run (if any) transportGroup : str (with spaces) standard transects (``Transport Transects``) vs Arctic transects (``Arctic Transport Transects``) - + """ # Authors @@ -499,7 +495,7 
@@ def __init__(self, parentTask, transect, transectIndex, controlConfig, transectIndex : int The index into the dimension ``nTransects`` of the transect to plot - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) transportGroup : str (with spaces) diff --git a/mpas_analysis/ocean/utility.py b/mpas_analysis/ocean/utility.py index a96c7c3fa..f6a6a8f3e 100644 --- a/mpas_analysis/ocean/utility.py +++ b/mpas_analysis/ocean/utility.py @@ -15,8 +15,8 @@ # ------- # Xylar Asay-Davis -import numpy -import xarray +import numpy as np +import xarray as xr def add_standard_regions_and_subset(ds, config, regionShortNames=None): @@ -30,7 +30,7 @@ def add_standard_regions_and_subset(ds, config, regionShortNames=None): ds : xarray.Dataset the dataset to which region names should be added - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options regionShortNames : list of str, optional @@ -63,7 +63,7 @@ def get_standard_region_names(config, regionShortNames): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options regionShortNames : list of str @@ -111,9 +111,12 @@ def compute_zmid(bottomDepth, maxLevelCell, layerThickness): nVertLevels = layerThickness.sizes['nVertLevels'] - vertIndex = \ - xarray.DataArray.from_dict({'dims': ('nVertLevels',), - 'data': numpy.arange(nVertLevels)}) + vertIndex = xr.DataArray.from_dict( + { + 'dims': ('nVertLevels',), + 'data': np.arange(nVertLevels) + } + ) layerThickness = layerThickness.where(vertIndex <= maxLevelCell) @@ -126,3 +129,169 @@ def compute_zmid(bottomDepth, maxLevelCell, layerThickness): zMid = zLayerBot + 0.5 * layerThickness return zMid + + +def compute_zinterface(bottomDepth, maxLevelCell, layerThickness): + """ + Computes zInterface given data arrays for bottomDepth, maxLevelCell and + layerThickness + + Parameters + ---------- + 
bottomDepth : ``xarray.DataArray`` + the depth of the ocean bottom (positive) + + maxLevelCell : ``xarray.DataArray`` + the 0-based vertical index of the bottom of the ocean + + layerThickness : ``xarray.DataArray`` + the thickness of MPAS-Ocean layers (possibly as a function of time) + + Returns + ------- + zInterface : ``xarray.DataArray`` + the vertical coordinate defining the interfaces between layers, masked + below the bathymetry + """ + # Authors + # ------- + # Xylar Asay-Davis + + nVertLevels = layerThickness.sizes['nVertLevels'] + + vertIndex = xr.DataArray.from_dict( + { + 'dims': ('nVertLevels',), + 'data': np.arange(nVertLevels) + } + ) + + layerThickness = layerThickness.where(vertIndex <= maxLevelCell) + thicknessSum = layerThickness.sum(dim='nVertLevels') + + zSurface = -bottomDepth + thicknessSum + + zInterfaceList = [zSurface] + + zTop = zSurface + + for zIndex in range(nVertLevels): + zBot = zTop - layerThickness.isel(nVertLevels=zIndex) + zInterfaceList.append(zBot) + zTop = zBot + + zInterface = xr.concat(zInterfaceList, dim='nVertLevelsP1').transpose( + 'nCells', 'nVertLevelsP1') + return zInterface + + +def vector_cell_to_edge_isotropic(ds_mesh, zonal_cell, meridional_cell): + """ + Compute the zonal and meridional components of a vector at edges from + cell-centered components using isotropic area-weighted averaging. 
+ + Parameters + ---------- + ds_mesh : xarray.Dataset + MPAS mesh variables, must include: + - verticesOnEdge + - cellsOnVertex + - kiteAreasOnVertex + + zonal_cell : xarray.DataArray + Zonal component at cell centers (nCells,) + + meridional_cell : xarray.DataArray + Meridional component at cell centers (nCells,) + + Returns + ------- + zonal_edge : xarray.DataArray + Zonal component at edges (nEdges,) + + meridional_edge : xarray.DataArray + Meridional component at edges (nEdges,) + """ + vertices_on_edge = ds_mesh.verticesOnEdge - 1 + cells_on_vertex = ds_mesh.cellsOnVertex - 1 + kite_areas_on_vertex = ds_mesh.kiteAreasOnVertex + + n_edges = vertices_on_edge.sizes['nEdges'] + vertex_degree = cells_on_vertex.sizes['vertexDegree'] + + zonal_edge = np.zeros(n_edges, dtype=float) + meridional_edge = np.zeros(n_edges, dtype=float) + area_sum = np.zeros(n_edges, dtype=float) + + for v in range(2): + # all valid edges have 2 valid vertices on that edge + voe = vertices_on_edge.isel(TWO=v) + for c in range(vertex_degree): + # cells on vertices on edge + covoe = cells_on_vertex.isel( + vertexDegree=c, + nVertices=voe + ) + valid = covoe >= 0 + valid_covoe = covoe.isel(nEdges=valid) + valid_voe = voe.isel(nEdges=valid) + area = kite_areas_on_vertex.isel( + vertexDegree=c, + nVertices=valid_voe + ).values + if np.any(area == 0): + raise ValueError( + "Some kite areas of valid cells on vertex have zero area. " + "This seems to be a bug in the mesh or " + "vector_cell_to_edge_isotropic()." + ) + zcell = zonal_cell.isel(nCells=valid_covoe).values + mcell = meridional_cell.isel(nCells=valid_covoe).values + zonal_edge[valid] += zcell * area + meridional_edge[valid] += mcell * area + area_sum[valid] += area + + if np.any(area_sum == 0): + raise ValueError( + "Some edges have zero area. This seems to be a bug in the mesh " + "or vector_cell_to_edge_isotropic()." 
+ ) + + # Normalize by the area sum to get the average + zonal_edge /= area_sum + meridional_edge /= area_sum + + # Wrap as xarray DataArrays + zonal_edge = xr.DataArray(zonal_edge, dims=('nEdges',)) + meridional_edge = xr.DataArray(meridional_edge, dims=('nEdges',)) + return zonal_edge, meridional_edge + + +def vector_to_edge_normal(ds_mesh, zonal_edge, meridional_edge): + """ + Compute the normal component of a vector at an edge from + the zonal and meridional components. + + Parameters + ---------- + ds_mesh : xarray.Dataset + MPAS mesh variables, must include: + - angleEdge + + zonal_edge : xarray.DataArray + Zonal component at edges (nEdges,) + + meridional_edge : xarray.DataArray + Meridional component at edges (nEdges,) + + Returns + ------- + normal_edge : xarray.DataArray + Normal component at edges (nEdges,) + """ + + angle_edge = ds_mesh.angleEdge + normal_edge = ( + np.cos(angle_edge) * zonal_edge + np.sin(angle_edge) * meridional_edge + ) + return normal_edge diff --git a/mpas_analysis/ocean/woa_transects.py b/mpas_analysis/ocean/woa_transects.py index 83bd7a88f..d48e8fd97 100644 --- a/mpas_analysis/ocean/woa_transects.py +++ b/mpas_analysis/ocean/woa_transects.py @@ -45,14 +45,14 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted as a transect - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -79,8 +79,9 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): if verticalComparisonGridName in ['mpas', 'obs']: verticalComparisonGrid = None else: - verticalComparisonGrid = config.getexpression( - sectionName, 'verticalComparisonGrid', use_numpyfunc=True) + 
verticalComparisonGrid = config.getnumpy( + sectionName, 'verticalComparisonGrid' + ) verticalBounds = config.getexpression(sectionName, 'verticalBounds') @@ -202,7 +203,7 @@ def __init__(self, config, horizontalResolution, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options horizontalResolution : str @@ -362,8 +363,7 @@ def build_observational_dataset(self, fileName, transectName): def _get_longitudes(config): - longitudes = config.getexpression('woaTransects', 'longitudes', - use_numpyfunc=True) + longitudes = config.getnumpy('woaTransects', 'longitudes') longitudes = np.array(longitudes) # make sure longitudes are between -180 and 180 longitudes = np.sort(np.mod(longitudes + 180., 360.) - 180.) diff --git a/mpas_analysis/ocean/woce_transects.py b/mpas_analysis/ocean/woce_transects.py index 2e3a68de7..039569d80 100644 --- a/mpas_analysis/ocean/woce_transects.py +++ b/mpas_analysis/ocean/woce_transects.py @@ -35,14 +35,14 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` The task that produced the climatology to be remapped and plotted as a transect - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -69,8 +69,9 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None): if verticalComparisonGridName in ['mpas', 'obs']: verticalComparisonGrid = None else: - verticalComparisonGrid = config.getexpression( - sectionName, 'verticalComparisonGrid', use_numpyfunc=True) + verticalComparisonGrid = config.getnumpy( + sectionName, 'verticalComparisonGrid' + ) verticalBounds = config.getexpression(sectionName, 'verticalBounds') diff --git a/mpas_analysis/polar_regions.cfg 
b/mpas_analysis/polar_regions.cfg index f6e4a9b1b..ef6fbfaa7 100644 --- a/mpas_analysis/polar_regions.cfg +++ b/mpas_analysis/polar_regions.cfg @@ -788,6 +788,15 @@ makeTables = True # ['all'] for all 106 ice shelves and regions. iceShelvesInTable = ['all'] + +[climatologyMapWindStressCurl] +## options related to plotting horizontally remapped climatologies of +## wind stress curl against control model results + +# comparison grid(s) on which to plot analysis +comparisonGrids = ['latlon', 'arctic_extended', 'antarctic_extended'] + + [timeSeriesTransport] ## options related to plotting time series of transport through transects transportGroups = ['Transport Transects', 'Arctic Transport Transects'] @@ -806,7 +815,10 @@ transportGroups = ['Transport Transects', 'Arctic Transport Transects'] # land_ice_mass_change : Mass anomaly due to land ice fluxes # land_ice_ssh_change : SSH anomaly due to land ice fluxes # land_ice_mass_flux_components : Mass fluxes from land ice -plotTypes = ['absolute_energy_error', 'absolute_salt_error', 'total_mass_change', 'land_ice_mass_flux_components', 'land_ice_mass_change'] +plotTypes = ['absolute_energy_error', 'absolute_salt_error', + 'total_mass_change', 'land_ice_mass_change', + 'land_ice_ssh_change', 'land_ice_mass_flux', + 'land_ice_mass_flux_components'] [timeSeriesArcticOceanRegions] @@ -825,3 +837,10 @@ regionNames = ['all'] # See "regionNames" in the antarcticRegions masks file in # regionMaskSubdirectory for details. 
regionNames = ['all'] + + +[climatologyMapMassFluxes] + +variables = ['riverRunoffFlux', 'iceRunoffFlux', 'snowFlux', 'rainFlux', + 'evaporationFlux', 'seaIceFreshWaterFlux', + 'landIceFreshwaterFlux', 'icebergFreshWaterFlux'] diff --git a/mpas_analysis/sea_ice/climatology_map_albedo.py b/mpas_analysis/sea_ice/climatology_map_albedo.py index 685838377..3d313d8f5 100755 --- a/mpas_analysis/sea_ice/climatology_map_albedo.py +++ b/mpas_analysis/sea_ice/climatology_map_albedo.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -250,8 +250,8 @@ def _compute_albedo(self, climatology): """ Compute the albedo """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) albedo = climatology['timeMonthly_avg_broadbandAlbedo'] diff --git a/mpas_analysis/sea_ice/climatology_map_area_pond.py b/mpas_analysis/sea_ice/climatology_map_area_pond.py index 516c28d17..06cc8ba65 100755 --- a/mpas_analysis/sea_ice/climatology_map_area_pond.py +++ b/mpas_analysis/sea_ice/climatology_map_area_pond.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere 
: {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -250,8 +250,8 @@ def _compute_pondarea(self, climatology): """ Compute the melt pond area fraction """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) pondarea = climatology['timeMonthly_avg_meltPondAreaFinalArea'] diff --git a/mpas_analysis/sea_ice/climatology_map_area_ridge.py b/mpas_analysis/sea_ice/climatology_map_area_ridge.py index 953a4863b..331f097a5 100755 --- a/mpas_analysis/sea_ice/climatology_map_area_ridge.py +++ b/mpas_analysis/sea_ice/climatology_map_area_ridge.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -252,14 +252,13 @@ def customize_masked_climatology(self, climatology, season): def _compute_ridgefraction(self, climatology): """ - Compute the mean ridge thickness in m + Compute the mean ridge thickness in m """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) ridgearea = climatology['timeMonthly_avg_ridgedIceAreaAverage'] - area = climatology['timeMonthly_avg_iceAreaCell'] ridgefraction = ridgearea # area fraction of sea ice -# ridgefraction = ridgearea*area # area 
fraction of grid cell + return ridgefraction diff --git a/mpas_analysis/sea_ice/climatology_map_berg_conc.py b/mpas_analysis/sea_ice/climatology_map_berg_conc.py index 68bca886b..a2d97772c 100644 --- a/mpas_analysis/sea_ice/climatology_map_berg_conc.py +++ b/mpas_analysis/sea_ice/climatology_map_berg_conc.py @@ -35,7 +35,7 @@ def __init__(self, config, mpasClimatologyTask, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` @@ -44,7 +44,7 @@ def __init__(self, config, mpasClimatologyTask, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -193,9 +193,9 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using the lat/lon # coordinates - obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, - latVarName='latitude', - lonVarName='longitude') + obsDescriptor = LatLonGridDescriptor.read(filename=fileName, + lat_var_name='latitude', + lon_var_name='longitude') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/sea_ice/climatology_map_melting.py b/mpas_analysis/sea_ice/climatology_map_melting.py index 4eeec5b24..713156df6 100755 --- a/mpas_analysis/sea_ice/climatology_map_melting.py +++ b/mpas_analysis/sea_ice/climatology_map_melting.py @@ -35,7 +35,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -44,7 +44,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : 
mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -254,8 +254,8 @@ def _compute_melting(self, climatology): Compute the total sea ice melting in m yr^-1 from the individual melt fields in m s^-1. """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) units_scale_factor = 60 * 60 * 24 * 365 @@ -266,6 +266,7 @@ def _compute_melting(self, climatology): melting = (basal + surface + lateral) * units_scale_factor return melting + class RemapAnIceFluxMeltingClimatology(RemapObservedClimatologySubtask): """ A subtask for reading and remapping sea ice melting from AnIceFlux @@ -295,9 +296,9 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using the lat/lon # coordinates - obsDescriptor = LatLon2DGridDescriptor.read(fileName=fileName, - latVarName='lat', - lonVarName='lon') + obsDescriptor = LatLon2DGridDescriptor.read(filename=fileName, + lat_var_name='lat', + lon_var_name='lon') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/sea_ice/climatology_map_production.py b/mpas_analysis/sea_ice/climatology_map_production.py index d151bded3..4bbea54af 100755 --- a/mpas_analysis/sea_ice/climatology_map_production.py +++ b/mpas_analysis/sea_ice/climatology_map_production.py @@ -35,7 +35,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -44,7 +44,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : 
tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -254,8 +254,8 @@ def _compute_production(self, climatology): Compute the total sea ice production in m yr^-1 from the individual production fields in m s^-1. """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) units_scale_factor = 60 * 60 * 24 * 365 @@ -266,6 +266,7 @@ def _compute_production(self, climatology): production = (congelation + frazil + snowice) * units_scale_factor return production + class RemapAnIceFluxProductionClimatology(RemapObservedClimatologySubtask): """ A subtask for reading and remapping sea ice production from AnIceFlux @@ -295,9 +296,9 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using the lat/lon # coordinates - obsDescriptor = LatLon2DGridDescriptor.read(fileName=fileName, - latVarName='lat', - lonVarName='lon') + obsDescriptor = LatLon2DGridDescriptor.read(filename=fileName, + lat_var_name='lat', + lon_var_name='lon') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/sea_ice/climatology_map_sea_ice_conc.py b/mpas_analysis/sea_ice/climatology_map_sea_ice_conc.py index 974cf6c58..9162638eb 100644 --- a/mpas_analysis/sea_ice/climatology_map_sea_ice_conc.py +++ b/mpas_analysis/sea_ice/climatology_map_sea_ice_conc.py @@ -38,7 +38,7 @@ def __init__(self, config, mpasClimatologyTask, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` @@ -47,7 +47,7 @@ def __init__(self, config, mpasClimatologyTask, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) 
""" # Authors @@ -184,7 +184,7 @@ def _add_obs_tasks(self, seasons, comparisonGridNames, hemisphere, galleryName='Observations: SSM/I {}'.format( prefix), maskMinThreshold=minConcentration, - extend='neither', + extend='both', prependComparisonGrid=False) self.add_subtask(subtask) @@ -231,7 +231,7 @@ def _add_ref_tasks(self, seasons, comparisonGridNames, hemisphere, groupLink='{}_conc'.format(hemisphere.lower()), galleryName=galleryName, maskMinThreshold=minConcentration, - extend='neither', + extend='both', prependComparisonGrid=False) self.add_subtask(subtask) @@ -265,9 +265,9 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using the lat/lon # coordinates - obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, - latVarName='t_lat', - lonVarName='t_lon') + obsDescriptor = LatLonGridDescriptor.read(filename=fileName, + lat_var_name='t_lat', + lon_var_name='t_lon') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/sea_ice/climatology_map_sea_ice_thick.py b/mpas_analysis/sea_ice/climatology_map_sea_ice_thick.py index 752c682c3..9f033ec7f 100644 --- a/mpas_analysis/sea_ice/climatology_map_sea_ice_thick.py +++ b/mpas_analysis/sea_ice/climatology_map_sea_ice_thick.py @@ -38,7 +38,7 @@ def __init__(self, config, mpasClimatologyTask, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasClimatologyTask : ``MpasClimatologyTask`` @@ -47,7 +47,7 @@ def __init__(self, config, mpasClimatologyTask, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - controlConfig : mpas_tools.config.MpasConfigParser, optional + controlConfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -166,7 +166,7 @@ def __init__(self, config, mpasClimatologyTask, hemisphere, groupLink=f'{hemisphere.lower()}_thick', galleryName=galleryName, maskMinThreshold=0, - 
extend='neither', + extend='both', prependComparisonGrid=False) self.add_subtask(subtask) @@ -200,9 +200,9 @@ def get_observation_descriptor(self, fileName): # create a descriptor of the observation grid using the lat/lon # coordinates - obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, - latVarName='t_lat', - lonVarName='t_lon') + obsDescriptor = LatLonGridDescriptor.read(filename=fileName, + lat_var_name='t_lat', + lon_var_name='t_lon') return obsDescriptor def build_observational_dataset(self, fileName): diff --git a/mpas_analysis/sea_ice/climatology_map_snow_depth.py b/mpas_analysis/sea_ice/climatology_map_snow_depth.py index 9511b7700..1d08f75a5 100755 --- a/mpas_analysis/sea_ice/climatology_map_snow_depth.py +++ b/mpas_analysis/sea_ice/climatology_map_snow_depth.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -250,14 +250,12 @@ def customize_masked_climatology(self, climatology, season): def _compute_snowdepth(self, climatology): """ - Compute the snow depth in m + Compute the snow depth in m """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) volume = climatology['timeMonthly_avg_snowVolumeCell'] - area = climatology['timeMonthly_avg_iceAreaCell'] snowdepth = volume # volume per unit grid cell area (m) -# snowdepth = volume/area # volume per unit sea ice area (m) return snowdepth diff --git 
a/mpas_analysis/sea_ice/climatology_map_snowice.py b/mpas_analysis/sea_ice/climatology_map_snowice.py index 595401138..c5b3888be 100755 --- a/mpas_analysis/sea_ice/climatology_map_snowice.py +++ b/mpas_analysis/sea_ice/climatology_map_snowice.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -255,8 +255,8 @@ def _compute_snowice(self, climatology): """ Compute the snow-ice formation rate in m yr^-1 """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) units_scale_factor = 60 * 60 * 24 * 365 diff --git a/mpas_analysis/sea_ice/climatology_map_snowmelt.py b/mpas_analysis/sea_ice/climatology_map_snowmelt.py index 74bd49724..6942c3b9f 100755 --- a/mpas_analysis/sea_ice/climatology_map_snowmelt.py +++ b/mpas_analysis/sea_ice/climatology_map_snowmelt.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -250,8 
+250,8 @@ def _compute_snowmelt(self, climatology): """ Compute the snow melt rate in m yr^-1 """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) units_scale_factor = 60 * 60 * 24 * 365 diff --git a/mpas_analysis/sea_ice/climatology_map_tendency_area_thermo.py b/mpas_analysis/sea_ice/climatology_map_tendency_area_thermo.py index 0942b3632..03f34a7ed 100755 --- a/mpas_analysis/sea_ice/climatology_map_tendency_area_thermo.py +++ b/mpas_analysis/sea_ice/climatology_map_tendency_area_thermo.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -251,10 +251,13 @@ def _compute_tendency(self, climatology): """ Compute the tendency in area fraction/yr """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) units_scale_factor = 60 * 60 * 24 * 365 - tendency = climatology['timeMonthly_avg_iceAreaTendencyThermodynamics'] * units_scale_factor + tendency = ( + climatology['timeMonthly_avg_iceAreaTendencyThermodynamics'] * + units_scale_factor + ) return tendency diff --git a/mpas_analysis/sea_ice/climatology_map_tendency_area_transp.py b/mpas_analysis/sea_ice/climatology_map_tendency_area_transp.py index 4986163e1..58d17a4c4 100755 --- a/mpas_analysis/sea_ice/climatology_map_tendency_area_transp.py +++ 
b/mpas_analysis/sea_ice/climatology_map_tendency_area_transp.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -251,10 +251,13 @@ def _compute_tendency(self, climatology): """ Compute the tendency in fraction/yr """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) units_scale_factor = 60 * 60 * 24 * 365 - tendency = climatology['timeMonthly_avg_iceAreaTendencyTransport'] * units_scale_factor + tendency = ( + climatology['timeMonthly_avg_iceAreaTendencyTransport'] * + units_scale_factor + ) return tendency diff --git a/mpas_analysis/sea_ice/climatology_map_tendency_volume_thermo.py b/mpas_analysis/sea_ice/climatology_map_tendency_volume_thermo.py index 2045cbf74..b9437b6dc 100755 --- a/mpas_analysis/sea_ice/climatology_map_tendency_volume_thermo.py +++ b/mpas_analysis/sea_ice/climatology_map_tendency_volume_thermo.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional 
Configuration options for a control run (if any) """ # Authors @@ -252,10 +252,13 @@ def _compute_tendency(self, climatology): """ Compute the tendency in m/yr """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) units_scale_factor = 60 * 60 * 24 * 365 - tendency = climatology['timeMonthly_avg_iceVolumeTendencyThermodynamics'] * units_scale_factor + tendency = ( + climatology['timeMonthly_avg_iceVolumeTendencyThermodynamics'] * + units_scale_factor + ) return tendency diff --git a/mpas_analysis/sea_ice/climatology_map_tendency_volume_transp.py b/mpas_analysis/sea_ice/climatology_map_tendency_volume_transp.py index f91fa7aa9..4919d7538 100755 --- a/mpas_analysis/sea_ice/climatology_map_tendency_volume_transp.py +++ b/mpas_analysis/sea_ice/climatology_map_tendency_volume_transp.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -251,10 +251,13 @@ def _compute_tendency(self, climatology): """ Compute the tendency in m/yr """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) units_scale_factor = 60 * 60 * 24 * 365 - tendency = climatology['timeMonthly_avg_iceVolumeTendencyTransport'] * units_scale_factor + tendency = ( + climatology['timeMonthly_avg_iceVolumeTendencyTransport'] * + units_scale_factor + ) 
return tendency diff --git a/mpas_analysis/sea_ice/climatology_map_volume_ridge.py b/mpas_analysis/sea_ice/climatology_map_volume_ridge.py index 987edbd91..3e85cb904 100755 --- a/mpas_analysis/sea_ice/climatology_map_volume_ridge.py +++ b/mpas_analysis/sea_ice/climatology_map_volume_ridge.py @@ -34,7 +34,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask @@ -43,7 +43,7 @@ def __init__(self, config, mpas_climatology_task, hemisphere, hemisphere : {'NH', 'SH'} The hemisphere to plot - control_config : mpas_tools.config.MpasConfigParser, optional + control_config : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -251,14 +251,12 @@ def customize_masked_climatology(self, climatology, season): def _compute_volumeridge(self, climatology): """ - Compute the mean ridge thickness in m + Compute the mean ridge thickness in m """ - ds_restart = xr.open_dataset(self.restartFileName) - ds_restart = ds_restart.isel(Time=0) + ds_mesh = xr.open_dataset(self.meshFilename) + ds_mesh = ds_mesh.isel(Time=0) volume = climatology['timeMonthly_avg_ridgedIceVolumeAverage'] - area = climatology['timeMonthly_avg_iceAreaCell'] volumeridge = volume # volume per unit sea ice area (m) -# volumeridge = volume*area # volume per unit grid cell area (m) return volumeridge diff --git a/mpas_analysis/sea_ice/time_series.py b/mpas_analysis/sea_ice/time_series.py index a3a77c23a..b0e0fcca0 100644 --- a/mpas_analysis/sea_ice/time_series.py +++ b/mpas_analysis/sea_ice/time_series.py @@ -41,7 +41,7 @@ class TimeSeriesSeaIce(AnalysisTask): mpasTimeSeriesTask : ``MpasTimeSeriesTask`` The task that extracts the time series from MPAS monthly output - controlconfig : mpas_tools.config.MpasConfigParser + controlconfig : tranche.Tranche Configuration options for a 
control run (if any) """ @@ -56,13 +56,13 @@ def __init__(self, config, mpasTimeSeriesTask, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options mpasTimeSeriesTask : ``MpasTimeSeriesTask`` The task that extracts the time series from MPAS monthly output - controlconfig : mpas_tools.config.MpasConfigParser, optional + controlconfig : tranche.Tranche, optional Configuration options for a control run (if any) """ # Authors @@ -135,12 +135,7 @@ def setup_and_check(self): self.simulationStartTime = get_simulation_start_time(self.runStreams) - try: - self.restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-SeaIce restart file found: need at least ' - 'one restart file to perform remapping of ' - 'climatologies.') + self.meshFilename = self.get_mesh_filename() # these are redundant for now. Later cleanup is needed where these # file names are reused in run() @@ -667,7 +662,7 @@ def _compute_area_vol(self): outFileNames[hemisphere] = outFileName dsTimeSeries = {} - dsMesh = xr.open_dataset(self.restartFileName) + dsMesh = xr.open_dataset(self.meshFilename) dsMesh = dsMesh[['latCell', 'areaCell']] # Load data ds = open_mpas_dataset( diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index 21c480b2a..f47660658 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -19,6 +19,7 @@ import time import traceback import logging +import os import sys from mpas_analysis.shared.io import NameList, StreamsFile @@ -32,7 +33,7 @@ class AnalysisTask(Process): Attributes ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options taskName : str @@ -109,7 +110,7 @@ def __init__(self, config, taskName, componentName, tags=[], Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration 
options taskName : str @@ -491,6 +492,39 @@ def set_start_end_date(self, section): self.config.set(section, 'endDate', endDate) + def get_mesh_filename(self): + """ + Get the name of the MPAS mesh file for this component. + + Returns + ------- + meshFilename : str + The name of the MPAS mesh file for this component + """ + # Authors + # ------- + # Xylar Asay-Davis + + meshStream = self.config.get(self.componentName, 'meshStream') + try: + meshFilename = self.runStreams.readpath(meshStream)[0] + except ValueError: + meshFilename = None + + if meshFilename is None or not os.path.exists(meshFilename): + # try again with "restart" stream + try: + meshFilename = self.runStreams.readpath('restart')[0] + except ValueError: + meshFilename = None + + if meshFilename is None or not os.path.exists(meshFilename): + raise IOError( + f'The MPAS mesh file could not be found via either ' + f'"{meshStream}" or "restart" streams') + + return meshFilename + # }}} diff --git a/mpas_analysis/shared/climatology/climatology.py b/mpas_analysis/shared/climatology/climatology.py index b1ff93871..6436f7d80 100644 --- a/mpas_analysis/shared/climatology/climatology.py +++ b/mpas_analysis/shared/climatology/climatology.py @@ -47,7 +47,7 @@ def get_remapper(config, sourceDescriptor, comparisonDescriptor, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options sourceDescriptor : pyremap.MeshDescriptor @@ -84,10 +84,10 @@ def get_remapper(config, sourceDescriptor, comparisonDescriptor, # we need to remap because the grids don't match if vertices: - srcMeshName = f'{sourceDescriptor.meshName}_vertices' + srcMeshName = f'{sourceDescriptor.mesh_name}_vertices' else: - srcMeshName = sourceDescriptor.meshName - destMeshName = comparisonDescriptor.meshName + srcMeshName = sourceDescriptor.mesh_name + destMeshName = comparisonDescriptor.mesh_name mappingBaseName = \ f'{mappingFilePrefix}_{srcMeshName}_to_{destMeshName}_{method}.nc' @@ 
-119,22 +119,33 @@ def get_remapper(config, sourceDescriptor, comparisonDescriptor, make_directories(mappingSubdirectory) mappingFileName = f'{mappingSubdirectory}/{mappingBaseName}' - remapper = Remapper(sourceDescriptor, comparisonDescriptor, - mappingFileName) - mpiTasks = config.getint('execute', 'mapMpiTasks') esmf_parallel_exec = config.get('execute', 'mapParallelExec') if esmf_parallel_exec == 'None': esmf_parallel_exec = None - mappingSubdirectory = \ - build_config_full_path(config, 'output', - 'mappingSubdirectory') - make_directories(mappingSubdirectory) - with TemporaryDirectory(dir=mappingSubdirectory) as tempdir: - remapper.build_mapping_file(method=method, logger=logger, - mpiTasks=mpiTasks, tempdir=tempdir, - esmf_parallel_exec=esmf_parallel_exec) + remapper = Remapper( + ntasks=mpiTasks, + map_filename=mappingFileName, + method=method, + parallel_exec=esmf_parallel_exec, + src_descriptor=sourceDescriptor, + dst_descriptor=comparisonDescriptor, + ) + + if mappingFileName is not None and not os.path.exists(mappingFileName): + mappingSubdirectory = \ + build_config_full_path( + config, 'output', 'mappingSubdirectory') + make_directories(mappingSubdirectory) + with TemporaryDirectory(dir=mappingSubdirectory) as tempdir: + remapper.src_scrip_filename = os.path.join( + tempdir, remapper.src_scrip_filename) + remapper.dst_scrip_filename = os.path.join( + tempdir, remapper.dst_scrip_filename) + + # TEMP: logger not supported in this RC + remapper.build_map(logger=logger) return remapper @@ -323,7 +334,7 @@ def remap_and_write_climatology(config, climatologyDataSet, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options climatologyDataSet : ``xarray.DataSet`` or ``xarray.DataArray`` object @@ -358,7 +369,7 @@ def remap_and_write_climatology(config, climatologyDataSet, useNcremap = config.getboolean('climatology', 'useNcremap') - if remapper.mappingFileName is None: + if 
remapper.map_filename is None: # no remapping is needed remappedClimatology = climatologyDataSet else: @@ -372,17 +383,18 @@ def remap_and_write_climatology(config, climatologyDataSet, if useNcremap: if not os.path.exists(climatologyFileName): write_netcdf(climatologyDataSet, climatologyFileName) - remapper.remap_file(inFileName=climatologyFileName, - outFileName=remappedFileName, - overwrite=True, - renormalize=renormalizationThreshold, - logger=logger, - parallel_exec=parallel_exec) + remapper.ncremap( + in_filename=climatologyFileName, + out_filename=remappedFileName, + overwrite=True, + renormalize=renormalizationThreshold, + logger=logger, + parallel_exec=parallel_exec) remappedClimatology = xr.open_dataset(remappedFileName) else: - remappedClimatology = remapper.remap(climatologyDataSet, - renormalizationThreshold) + remappedClimatology = remapper.remap_numpy( + climatologyDataSet, renormalizationThreshold) write_netcdf_with_fill(remappedClimatology, remappedFileName) return remappedClimatology @@ -394,7 +406,7 @@ def get_unmasked_mpas_climatology_directory(config, op='avg'): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche configuration options op : {'avg', 'min', 'max'} @@ -422,7 +434,7 @@ def get_unmasked_mpas_climatology_file_name(config, season, componentName, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche configuration options season : str @@ -474,7 +486,7 @@ def get_masked_mpas_climatology_file_name(config, season, componentName, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options season : str @@ -541,7 +553,7 @@ def get_remapped_mpas_climatology_file_name(config, season, componentName, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options season : str @@ -584,7 +596,7 @@ def get_remapped_mpas_climatology_file_name(config, season, 
componentName, if comparisonGridName in known_comparison_grids: comparisonDescriptor = get_comparison_descriptor(config, comparisonGridName) - comparisonFullMeshName = comparisonDescriptor.meshName + comparisonFullMeshName = comparisonDescriptor.mesh_name else: comparisonFullMeshName = comparisonGridName.replace(' ', '_') @@ -677,7 +689,7 @@ def _matches_comparison(obsDescriptor, comparisonDescriptor): isinstance(comparisonDescriptor, ProjectionGridDescriptor): # pretty hard to determine if projections are the same, so we'll rely # on the grid names - match = obsDescriptor.meshName == comparisonDescriptor.meshName and \ + match = obsDescriptor.mesh_name == comparisonDescriptor.mesh_name and \ len(obsDescriptor.x) == len(comparisonDescriptor.x) and \ len(obsDescriptor.y) == len(comparisonDescriptor.y) and \ numpy.all(numpy.isclose(obsDescriptor.x, diff --git a/mpas_analysis/shared/climatology/comparison_descriptors.py b/mpas_analysis/shared/climatology/comparison_descriptors.py index 92a0b6a21..25ccd099a 100644 --- a/mpas_analysis/shared/climatology/comparison_descriptors.py +++ b/mpas_analysis/shared/climatology/comparison_descriptors.py @@ -34,7 +34,7 @@ def get_comparison_descriptor(config, comparison_grid_name): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options comparison_grid_name : {'latlon', 'antarctic', 'arctic', 'north_atlantic', @@ -71,7 +71,7 @@ def _get_lat_lon_comparison_descriptor(config): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options Returns @@ -105,7 +105,7 @@ def _get_projection_comparison_descriptor(config, comparison_grid_name): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options comparison_grid_name : str diff --git a/mpas_analysis/shared/climatology/mpas_climatology_task.py 
b/mpas_analysis/shared/climatology/mpas_climatology_task.py index 2ac27c0ed..c0414a287 100644 --- a/mpas_analysis/shared/climatology/mpas_climatology_task.py +++ b/mpas_analysis/shared/climatology/mpas_climatology_task.py @@ -86,7 +86,7 @@ def __init__(self, config, componentName, taskName=None, op='avg'): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options componentName : {'ocean', 'seaIce'} @@ -506,7 +506,7 @@ def _compute_climatologies_with_ncclimo(self, inDirectory, outDirectory, '-o', outDirectory] + inFiles if remapper is not None: - args.extend(['-r', remapper.mappingFileName]) + args.extend(['-r', remapper.map_filename]) if remappedDirectory is not None: args.extend(['-O', remappedDirectory]) diff --git a/mpas_analysis/shared/climatology/ref_year_mpas_climatology_task.py b/mpas_analysis/shared/climatology/ref_year_mpas_climatology_task.py index e186734a8..989d5257c 100644 --- a/mpas_analysis/shared/climatology/ref_year_mpas_climatology_task.py +++ b/mpas_analysis/shared/climatology/ref_year_mpas_climatology_task.py @@ -11,7 +11,7 @@ from io import StringIO -from mpas_tools.config import MpasConfigParser +from tranche import Tranche from mpas_analysis.shared.climatology import MpasClimatologyTask from mpas_analysis.shared.timekeeping.utility import get_simulation_start_time @@ -39,7 +39,7 @@ def __init__(self, config, componentName, taskName=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options componentName : {'ocean', 'seaIce'} diff --git a/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py b/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py index 00c286b8e..702f7f56e 100644 --- a/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py +++ b/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py @@ -62,8 +62,8 @@ class 
RemapMpasClimatologySubtask(AnalysisTask): Descriptors of the comparison grids to use for remapping, with grid names as the keys. - restartFileName : str - If ``comparisonGridName`` is not ``None``, the name of a restart + meshFilename : str + If ``comparisonGridName`` is not ``None``, the name of the mesh file from which the MPAS mesh can be read. useNcremap : bool, optional @@ -182,6 +182,7 @@ def __init__(self, mpasClimatologyTask, parentTask, climatologyName, self.useNcremap = useNcremap self.vertices = vertices + self.meshFilename = None def setup_and_check(self): """ @@ -205,12 +206,7 @@ def setup_and_check(self): # self.calendar super(RemapMpasClimatologySubtask, self).setup_and_check() - try: - self.restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS restart file found: need at least one ' - 'restart file to perform remapping of ' - 'climatologies.') + self.meshFilename = self.get_mesh_filename() # we set up the remapper here because ESFM_RegridWeightGen seems to # have trouble if it runs in another process (or in several at once) @@ -415,15 +411,15 @@ def _setup_remappers(self): for comparisonGridName in self.comparisonDescriptors: comparisonDescriptor = \ self.comparisonDescriptors[comparisonGridName] - self.comparisonGridName = comparisonDescriptor.meshName + self.comparisonGridName = comparisonDescriptor.mesh_name meshName = config.get('input', 'mpasMeshName') if self.vertices: mpasDescriptor = MpasVertexMeshDescriptor( - self.restartFileName, meshName=meshName) + self.meshFilename, mesh_name=meshName) else: mpasDescriptor = MpasCellMeshDescriptor( - self.restartFileName, meshName=meshName) - self.mpasMeshName = mpasDescriptor.meshName + self.meshFilename, mesh_name=meshName) + self.mpasMeshName = mpasDescriptor.mesh_name self.remappers[comparisonGridName] = get_remapper( config=config, sourceDescriptor=mpasDescriptor, @@ -451,7 +447,7 @@ def _setup_file_names(self): comparisonDescriptor = \ 
self.comparisonDescriptors[comparisonGridName] comparisonFullMeshNames[comparisonGridName] = \ - comparisonDescriptor.meshName + comparisonDescriptor.mesh_name keys = [] for season in self.seasons: @@ -588,7 +584,7 @@ def _remap(self, inFileName, outFileName, remapper, comparisonGridName, # ------- # Xylar Asay-Davis - if remapper.mappingFileName is None: + if remapper.map_filename is None: # no remapping is needed return @@ -603,20 +599,21 @@ def _remap(self, inFileName, outFileName, remapper, comparisonGridName, if self.useNcremap: basename, ext = os.path.splitext(outFileName) ncremapFilename = f'{basename}_ncremap{ext}' - remapper.remap_file(inFileName=inFileName, - outFileName=ncremapFilename, - overwrite=True, - renormalize=renormalizationThreshold, - logger=self.logger, - parallel_exec=parallel_exec) + remapper.ncremap( + in_filename=inFileName, + out_filename=ncremapFilename, + overwrite=True, + renormalize=renormalizationThreshold, + logger=self.logger, + parallel_exec=parallel_exec) remappedClimatology = xr.open_dataset(ncremapFilename) else: climatologyDataSet = xr.open_dataset(inFileName) - remappedClimatology = remapper.remap(climatologyDataSet, - renormalizationThreshold) + remappedClimatology = remapper.remap_numpy( + climatologyDataSet, renormalizationThreshold) # customize (if this function has been overridden) remappedClimatology = self.customize_remapped_climatology( diff --git a/mpas_analysis/shared/climatology/remap_observed_climatology_subtask.py b/mpas_analysis/shared/climatology/remap_observed_climatology_subtask.py index 3deebaab1..e97bad18d 100644 --- a/mpas_analysis/shared/climatology/remap_observed_climatology_subtask.py +++ b/mpas_analysis/shared/climatology/remap_observed_climatology_subtask.py @@ -175,7 +175,7 @@ def run_task(self): remapper = self.remappers[comparisonGridName] - if remapper.mappingFileName is None: + if remapper.map_filename is None: # no need to remap because the observations are on the # comparison grid already 
os.symlink(climatologyFileName, remappedFileName) @@ -266,7 +266,7 @@ def get_file_name(self, stage, season=None, comparisonGridName=None): else: remapper = self.remappers[comparisonGridName] - obsGridName = remapper.sourceDescriptor.meshName + obsGridName = remapper.src_descriptor.mesh_name outFilePrefix = self.outFilePrefix @@ -293,7 +293,7 @@ def get_file_name(self, stage, season=None, comparisonGridName=None): make_directories(remappedDirectory) - comparisonGridName = remapper.destinationDescriptor.meshName + comparisonGridName = remapper.dst_descriptor.mesh_name fileName = '{}/{}_{}_to_{}_{}.nc'.format( remappedDirectory, outFilePrefix, obsGridName, comparisonGridName, season) diff --git a/mpas_analysis/shared/generalized_reader/generalized_reader.py b/mpas_analysis/shared/generalized_reader/generalized_reader.py index 6c47c929a..ae0ece8f9 100644 --- a/mpas_analysis/shared/generalized_reader/generalized_reader.py +++ b/mpas_analysis/shared/generalized_reader/generalized_reader.py @@ -50,7 +50,7 @@ def open_multifile_dataset(fileNames, calendar, config, calendar : {``'gregorian'``, ``'noleap'``}, optional The name of one of the calendars supported by MPAS cores - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options simulationStartTime : string, optional diff --git a/mpas_analysis/shared/html/image_xml.py b/mpas_analysis/shared/html/image_xml.py index 71340c807..6fe894efa 100644 --- a/mpas_analysis/shared/html/image_xml.py +++ b/mpas_analysis/shared/html/image_xml.py @@ -33,7 +33,7 @@ def write_image_xml(config, filePrefix, componentName, componentSubdirectory, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche contains config options filePrefix : str diff --git a/mpas_analysis/shared/html/pages.py b/mpas_analysis/shared/html/pages.py index 883940dec..2a9097e0f 100644 --- a/mpas_analysis/shared/html/pages.py +++ b/mpas_analysis/shared/html/pages.py @@ -16,7 +16,7 @@ from 
os import makedirs from pathlib import Path -import pkg_resources +import importlib.resources as resources from lxml import etree import mpas_analysis.version @@ -29,7 +29,7 @@ def generate_html(config, analyses, controlConfig, customConfigFiles): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Config options analysis : ``OrderedDict`` of ``AnalysisTask`` objects @@ -38,7 +38,7 @@ def generate_html(config, analyses, controlConfig, customConfigFiles): the list of files to include on the webpage for the associated component. - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche Config options for a control run customConfigFiles : list of str @@ -66,13 +66,13 @@ def generate_html(config, analyses, controlConfig, customConfigFiles): try: ComponentPage.add_image(fileName, config, components, controlConfig) - except IOError: - print(' missing file {}'.format(fileName)) + except IOError as e: + print(f'Error reading {fileName}: {e}') missingCount += 1 if missingCount > 0: - print('Warning: {} XML files were missing and the analysis website' - ' will be incomplete.'.format(missingCount)) + print(f'Warning: {missingCount} XML files could not be read and the ' + f'analysis website will be incomplete.') # generate the page for each component and add the component to the main # page for componentName, component in components.items(): @@ -108,10 +108,10 @@ class MainPage(object): Attributes ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Config options - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche Config options for a control run customConfigFiles : list of str @@ -136,10 +136,10 @@ def __init__(self, config, controlConfig, customConfigFiles): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Config options - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : 
tranche.Tranche Config options for a control run customConfigFiles : list of str @@ -155,24 +155,15 @@ def __init__(self, config, controlConfig, customConfigFiles): self.customConfigFiles = customConfigFiles # get template text - fileName = \ - pkg_resources.resource_filename(__name__, - "templates/main_page.html") - - with open(fileName, 'r') as templateFile: - self.pageTemplate = templateFile.read() - - fileName = \ - pkg_resources.resource_filename(__name__, - "templates/main_component.html") - with open(fileName, 'r') as templateFile: - self.componentTemplate = templateFile.read() - - fileName = \ - pkg_resources.resource_filename(__name__, - "templates/config.html") - with open(fileName, 'r') as templateFile: - self.configTemplate = templateFile.read() + package = 'mpas_analysis.shared.html.templates' + templates = {} + for name in ['main_page', 'main_component', 'config']: + resource = resources.files(package).joinpath(f'{name}.html') + with resource.open('r') as templateFile: + templates[name] = templateFile.read() + self.pageTemplate = templates['main_page'] + self.componentTemplate = templates['main_component'] + self.configTemplate = templates['config'] # start with no components self.components = OrderedDict() @@ -273,45 +264,34 @@ def generate(self): pageText = _replace_tempate_text(self.pageTemplate, replacements) - htmlBaseDirectory = build_config_full_path(self.config, 'output', - 'htmlSubdirectory') + htmlBaseDirectory = build_config_full_path( + self.config, 'output', 'htmlSubdirectory' + ) for subdir in ['css', 'js']: - try: - makedirs('{}/{}'.format(htmlBaseDirectory, subdir)) - except OSError: - pass + makedirs(f'{htmlBaseDirectory}/{subdir}', exist_ok=True) - outFileName = '{}/index.html'.format(htmlBaseDirectory) + outFileName = f'{htmlBaseDirectory}/index.html' with open(outFileName, mode='w') as mainFile: mainFile.write( - pageText.encode('ascii', - 'xmlcharrefreplace').decode('ascii')) + pageText.encode('ascii', 
'xmlcharrefreplace').decode('ascii') + ) # copy the css and js files as well as general images - fileName = \ - pkg_resources.resource_filename(__name__, - "templates/style.css") - copyfile(fileName, '{}/css/style.css'.format(htmlBaseDirectory)) - - fileName = \ - pkg_resources.resource_filename(__name__, - "templates/index.js") - copyfile(fileName, '{}/js/index.js'.format(htmlBaseDirectory)) - - fileName = \ - pkg_resources.resource_filename(__name__, - "templates/mpas_logo.png") - copyfile(fileName, '{}/mpas_logo.png'.format(htmlBaseDirectory)) - - fileName = \ - pkg_resources.resource_filename(__name__, - "templates/config.png") - copyfile(fileName, '{}/config.png'.format(htmlBaseDirectory)) - - with open('{}/complete.{}.cfg'.format(htmlBaseDirectory, - runName), 'w') as configFile: + resource_targets = [ + ("style.css", "css/style.css"), + ("index.js", "js/index.js"), + ("mpas_logo.png", "mpas_logo.png"), + ("config.png", "config.png"), + ] + for resource_name, target_path in resource_targets: + package = 'mpas_analysis.shared.html.templates' + fileName = resources.files(package).joinpath(resource_name) + copyfile(str(fileName), f'{htmlBaseDirectory}/{target_path}') + + outFileName = f'{htmlBaseDirectory}/complete.{runName}.cfg' + with open(outFileName, 'w') as configFile: self.config.write(configFile) @@ -322,10 +302,10 @@ class ComponentPage(object): Attributes ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Config options - controlConfig : mpas_tools.config.MpasConfigParser + controlConfig : tranche.Tranche Config options for a control run name : str @@ -354,7 +334,7 @@ def __init__(self, config, name, subdirectory, controlConfig=None): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Config options name : str @@ -365,7 +345,7 @@ def __init__(self, config, name, subdirectory, controlConfig=None): subdirecory : str The subdirectory for the component's webpage - controlConfig : 
mpas_tools.config.MpasConfigParser, optional + controlConfig : tranche.Tranche, optional Config options for a control run """ # Authors @@ -386,11 +366,10 @@ def __init__(self, config, name, subdirectory, controlConfig=None): for templateName in ['page', 'quicklink', 'group', 'gallery', 'image', 'subtitle']: # get template text - fileName = pkg_resources.resource_filename( - __name__, - "templates/component_{}.html".format(templateName)) - - with open(fileName, 'r') as templateFile: + package = 'mpas_analysis.shared.html.templates' + resource = resources.files(package).joinpath( + f'component_{templateName}.html') + with resource.open('r') as templateFile: self.templates[templateName] = templateFile.read() # start with no groups @@ -408,7 +387,7 @@ def add_image(xmlFileName, config, components, controlConfig=None): xmlFileName : str The full path to the XML file describing the image to be added - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche contains config options components : OrederdDict of dict @@ -417,7 +396,7 @@ def add_image(xmlFileName, config, components, controlConfig=None): be added. ``components`` should be viewed as an input and output parameter, since it is modified by this function. 
- controlConfig : mpas_tools.config.MpasConfigParser, optional + controlConfig : tranche.Tranche, optional Config options for a control run """ # Authors diff --git a/mpas_analysis/shared/html/templates/__init__.py b/mpas_analysis/shared/html/templates/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mpas_analysis/shared/io/utility.py b/mpas_analysis/shared/io/utility.py index 9c99c0f49..a5e3a98b7 100644 --- a/mpas_analysis/shared/io/utility.py +++ b/mpas_analysis/shared/io/utility.py @@ -112,7 +112,7 @@ def build_config_full_path(config, section, relativePathOption, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche configuration from which to read the path section : str @@ -163,7 +163,7 @@ def get_region_mask(config, regionMaskFile): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche configuration from which to read the path regionMaskFile : str @@ -223,7 +223,7 @@ def build_obs_path(config, component, relativePathOption=None, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche configuration from which to read the path component : {'ocean', 'seaIce', 'iceberg'} diff --git a/mpas_analysis/shared/io/write_netcdf.py b/mpas_analysis/shared/io/write_netcdf.py index 99079083a..3347614d2 100644 --- a/mpas_analysis/shared/io/write_netcdf.py +++ b/mpas_analysis/shared/io/write_netcdf.py @@ -56,4 +56,10 @@ def write_netcdf_with_fill(ds, fileName, fillValues=netCDF4.default_fillvals): if dtype.type is numpy.bytes_: encodingDict[variableName] = {'dtype': str} + unlimited_dims = ds.encoding.get('unlimited_dims', None) + if unlimited_dims is not None: + if isinstance(unlimited_dims, str): + unlimited_dims = {unlimited_dims} + unlimited_dims = [dim for dim in unlimited_dims if dim in ds.dims] + ds.encoding['unlimited_dims'] = set(unlimited_dims) ds.to_netcdf(fileName, encoding=encodingDict) diff --git 
a/mpas_analysis/shared/plot/climatology_map.py b/mpas_analysis/shared/plot/climatology_map.py index 02dbd9949..0eb1e6092 100644 --- a/mpas_analysis/shared/plot/climatology_map.py +++ b/mpas_analysis/shared/plot/climatology_map.py @@ -61,7 +61,7 @@ def plot_polar_comparison( Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche the configuration, containing a [plot] section with options that control plotting @@ -277,7 +277,7 @@ def plot_global_comparison( Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche the configuration, containing a [plot] section with options that control plotting @@ -475,7 +475,7 @@ def plot_projection_comparison( Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche the configuration, containing a [plot] section with options that control plotting @@ -663,8 +663,8 @@ def _plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, else: figsize = config.getexpression(section, 'threePanelHorizFigSize') subplots = [131, 132, 133] - latLines = config.getexpression(section, 'latLines', use_numpyfunc=True) - lonLines = config.getexpression(section, 'lonLines', use_numpyfunc=True) + latLines = config.getnumpy(section, 'latLines') + lonLines = config.getnumpy(section, 'lonLines') # put latitude labels on the left unless we're in a polar projection left_labels = projectionName not in ['arctic', 'antarctic'] diff --git a/mpas_analysis/shared/plot/colormap.py b/mpas_analysis/shared/plot/colormap.py index c20e4010a..1c2cf7238 100644 --- a/mpas_analysis/shared/plot/colormap.py +++ b/mpas_analysis/shared/plot/colormap.py @@ -101,9 +101,7 @@ def setup_colormap(config, configSectionName, suffix=''): option = f'contourLevels{suffix}' if config.has_option(configSectionName, option): - contours = config.getexpression(configSectionName, - option, - use_numpyfunc=True) + contours = config.getnumpy(configSectionName, option) if 
isinstance(contours, str) and contours == 'none': contours = None @@ -388,9 +386,7 @@ def _setup_colormap_and_norm(config, configSectionName, suffix=''): f'{configSectionName}') try: - ticks = config.getexpression( - configSectionName, f'colorbarTicks{suffix}', - use_numpyfunc=True) + ticks = config.getnumpy(configSectionName, f'colorbarTicks{suffix}') except configparser.NoOptionError: ticks = None @@ -431,14 +427,10 @@ def _setup_indexed_colormap(config, configSectionName, suffix=''): colormap = plt.get_cmap(config.get(configSectionName, f'colormapName{suffix}')) - indices = config.getexpression(configSectionName, - f'colormapIndices{suffix}', - use_numpyfunc=True) + indices = config.getnumpy(configSectionName, f'colormapIndices{suffix}') try: - levels = config.getexpression( - configSectionName, f'colorbarLevels{suffix}', - use_numpyfunc=True) + levels = config.getnumpy(configSectionName, f'colorbarLevels{suffix}') except configparser.NoOptionError: levels = None @@ -463,9 +455,7 @@ def _setup_indexed_colormap(config, configSectionName, suffix=''): norm = cols.BoundaryNorm(levels, colormap.N) try: - ticks = config.getexpression( - configSectionName, f'colorbarTicks{suffix}', - use_numpyfunc=True) + ticks = config.getnumpy(configSectionName, f'colorbarTicks{suffix}') except configparser.NoOptionError: ticks = levels diff --git a/mpas_analysis/shared/plot/plot_climatology_map_subtask.py b/mpas_analysis/shared/plot/plot_climatology_map_subtask.py index 7db492d19..f94c678a8 100644 --- a/mpas_analysis/shared/plot/plot_climatology_map_subtask.py +++ b/mpas_analysis/shared/plot/plot_climatology_map_subtask.py @@ -141,7 +141,7 @@ def __init__(self, parentTask, season, comparisonGridName, A second subtask for remapping another MPAS climatology to plot in the second panel and compare with in the third panel - controlConfig : mpas_tools.config.MpasConfigParser, optional + controlConfig : tranche.Tranche, optional Configuration options for a control run (if any) depth : 
{float, 'top', 'bot'}, optional @@ -603,8 +603,8 @@ def _plot_projection(self, remappedModelClimatology, comparisonDescriptor = get_comparison_descriptor( config, comparisonGridName) - x = comparisonDescriptor.xCorner - y = comparisonDescriptor.yCorner + x = comparisonDescriptor.x_corner + y = comparisonDescriptor.y_corner aspectRatio = (x[-1] - x[0])/(y[-1] - y[0]) diff --git a/mpas_analysis/shared/plot/save.py b/mpas_analysis/shared/plot/save.py index 2629397fd..5f6d73400 100644 --- a/mpas_analysis/shared/plot/save.py +++ b/mpas_analysis/shared/plot/save.py @@ -23,7 +23,7 @@ def savefig(filename, config, tight=True, pad_inches=0.1): filename : str the file name to be written - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options tight : bool, optional diff --git a/mpas_analysis/shared/plot/vertical_section.py b/mpas_analysis/shared/plot/vertical_section.py index e7f417350..04b57b5b2 100644 --- a/mpas_analysis/shared/plot/vertical_section.py +++ b/mpas_analysis/shared/plot/vertical_section.py @@ -18,8 +18,6 @@ import matplotlib import matplotlib.pyplot as plt -from matplotlib.tri import Triangulation -from mpl_toolkits.axes_grid1 import make_axes_locatable import xarray as xr import numpy as np @@ -38,13 +36,6 @@ def plot_vertical_section_comparison( colorMapSectionName, xCoords=None, zCoord=None, - triangulation_args=None, - xOutlineModel=None, - zOutlineModel=None, - xOutlineRef=None, - zOutlineRef=None, - xOutlineDiff=None, - zOutlineDiff=None, colorbarLabel=None, xlabels=None, ylabel=None, @@ -118,28 +109,6 @@ def plot_vertical_section_comparison( zCoord : xarray.DataArray, optional The z coordinates for the model, ref and diff arrays - triangulation_args : dict, optional - A dict of arguments to create a matplotlib.tri.Triangulation of the - transect that does not rely on it being on a logically rectangular grid. 
- The arguments rather than the triangulation itself are passed because - multiple triangulations with different masks are needed internally and - there is not an obvious mechanism for copying an existing triangulation. - If this option is provided, ``xCoords`` is only used for tick marks if - more than one x axis is requested, and ``zCoord`` will be ignored. - - xOutlineModel, zOutlineModel : numpy.ndarray, optional - pairs of points defining line segments that are used to outline the - valid region of the mesh for the model panel if ``outlineValid = True`` - and ``triangulation_args`` is not ``None`` - - xOutlineRef, zOutlineRef : numpy.ndarray, optional - Same as ``xOutlineModel`` and ``zOutlineModel`` but for the reference - panel - - xOutlineDiff, zOutlineDiff : numpy.ndarray, optional - Same as ``xOutlineModel`` and ``zOutlineModel`` but for the difference - panel - colorMapSectionName : str section name in ``config`` where color map info can be found. @@ -410,9 +379,6 @@ def plot_vertical_section_comparison( colorMapSectionName, xCoords=xCoords, zCoord=zCoord, - triangulation_args=triangulation_args, - xOutline=xOutlineModel, - zOutline=zOutlineModel, suffix=resultSuffix, colorbarLabel=colorbarLabel, title=title, @@ -461,9 +427,6 @@ def plot_vertical_section_comparison( colorMapSectionName, xCoords=xCoords, zCoord=zCoord, - triangulation_args=triangulation_args, - xOutline=xOutlineRef, - zOutline=zOutlineRef, suffix=resultSuffix, colorbarLabel=colorbarLabel, title=refTitle, @@ -504,9 +467,6 @@ def plot_vertical_section_comparison( colorMapSectionName, xCoords=xCoords, zCoord=zCoord, - triangulation_args=triangulation_args, - xOutline=xOutlineDiff, - zOutline=zOutlineDiff, suffix=diffSuffix, colorbarLabel=colorbarLabel, title=diffTitle, @@ -557,9 +517,6 @@ def plot_vertical_section( colorMapSectionName, xCoords=None, zCoord=None, - triangulation_args=None, - xOutline=None, - zOutline=None, suffix='', colorbarLabel=None, title=None, @@ -639,22 +596,6 @@ def 
plot_vertical_section( zCoord : xarray.DataArray, optional The z coordinates for the ``field`` - triangulation_args : dict, optional - A dict of arguments to create a matplotlib.tri.Triangulation of the - transect that does not rely on it being on a logically rectangular grid. - The arguments rather than the triangulation itself are passed because - multiple triangulations with different masks are needed internally and - there is not an obvious mechanism for copying an existing triangulation. - If this option is provided, ``xCoords`` is only used for tick marks if - more than one x axis is requested, and ``zCoord`` will be ignored. - - xOutline, zOutline : numpy.ndarray, optional - pairs of points defining line segments that are used to outline the - valid region of the mesh if ``outlineValid = True`` and - ``triangulation_args`` is not ``None`` - - - suffix : str, optional the suffix used for colorbar config options @@ -849,68 +790,38 @@ def plot_vertical_section( if len(xCoords) != len(xlabels): raise ValueError('Expected the same number of xCoords and xlabels') - if triangulation_args is None: - - x, y = xr.broadcast(xCoords[0], zCoord) - dims_in_field = all([dim in field.dims for dim in x.dims]) + x, y = xr.broadcast(xCoords[0], zCoord) + dims_in_field = all([dim in field.dims for dim in x.dims]) - if dims_in_field: - x = x.transpose(*field.dims) - y = y.transpose(*field.dims) - else: - xsize = list(x.sizes.values()) - fieldsize = list(field.sizes.values()) - if xsize[0] == fieldsize[0] + 1 and xsize[1] == fieldsize[1] + 1: - pass - elif xsize[0] == fieldsize[1] + 1 and xsize[1] == fieldsize[0] + 1: - x = x.transpose(x.dims[1], x.dims[0]) - y = y.transpose(y.dims[1], y.dims[0]) - else: - raise ValueError('Sizes of coords {}x{} and field {}x{} not ' - 'compatible.'.format(xsize[0], xsize[1], - fieldsize[0], - fieldsize[1])) - - # compute moving averages with respect to the x dimension - if movingAveragePoints is not None and movingAveragePoints != 1: - dim = 
field.dims[0] - field = field.rolling(dim={dim: movingAveragePoints}, - center=True).mean().dropna(dim, how='all') - x = x.rolling(dim={dim: movingAveragePoints}, - center=True).mean().dropna(dim, how='all') - y = y.rolling(dim={dim: movingAveragePoints}, - center=True).mean().dropna(dim, how='all') - - mask = field.notnull() - maskedTriangulation, unmaskedTriangulation = _get_triangulation( - x, y, mask) - if contourComparisonField is not None: - mask = field.notnull() - maskedComparisonTriangulation, _ = _get_triangulation(x, y, mask) - else: - maskedComparisonTriangulation = None + if dims_in_field: + x = x.transpose(*field.dims) + y = y.transpose(*field.dims) else: - mask = field.notnull() - triMask = np.logical_not(mask.values) - # if any node of a triangle is masked, the triangle is masked - triMask = np.amax(triMask, axis=1) - unmaskedTriangulation = Triangulation(**triangulation_args) - anythingToPlot = not np.all(triMask) - if anythingToPlot: - mask_args = dict(triangulation_args) - mask_args['mask'] = triMask - maskedTriangulation = Triangulation(**mask_args) - else: - maskedTriangulation = None - if contourComparisonField is not None: - mask = contourComparisonField.notnull() - triMask = np.logical_not(mask.values) - triMask = np.amax(triMask, axis=1) - mask_args = dict(triangulation_args) - mask_args['mask'] = triMask - maskedComparisonTriangulation = Triangulation(**mask_args) + xsize = list(x.sizes.values()) + fieldsize = list(field.sizes.values()) + if xsize[0] == fieldsize[0] + 1 and xsize[1] == fieldsize[1] + 1: + pass + elif xsize[0] == fieldsize[1] + 1 and xsize[1] == fieldsize[0] + 1: + x = x.transpose(x.dims[1], x.dims[0]) + y = y.transpose(y.dims[1], y.dims[0]) else: - maskedComparisonTriangulation = None + raise ValueError(f'Sizes of coords {xsize[0]}x{xsize[1]} and ' + f'field {fieldsize[0]}x{fieldsize[1]} not ' + f'compatible.') + + # compute moving averages with respect to the x dimension + if movingAveragePoints is not None and 
movingAveragePoints != 1: + dim = field.dims[0] + field = field.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim, how='all') + x = x.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim, how='all') + y = y.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim, how='all') + + mask = field.notnull() + + anythingToPlot = np.any(mask) # set up figure if dpi is None: @@ -926,27 +837,25 @@ def plot_vertical_section( # fill the unmasked region with the invalid color so it will show through # any masked regions zeroArray = xr.zeros_like(field) - plt.tricontourf(unmaskedTriangulation, zeroArray.values.ravel(), - colors=invalidColor) + plt.contourf(x.values, y.values, zeroArray.values, + colors=invalidColor) - if maskedTriangulation is not None: + if anythingToPlot: # there's something to plot if not plotAsContours: # display a heatmap of fieldArray - fieldMasked = field.where(mask, 0.0).values.ravel() - if colormapDict['levels'] is None: - plotHandle = plt.tripcolor(maskedTriangulation, fieldMasked, - cmap=colormapDict['colormap'], - norm=colormapDict['norm'], - rasterized=True, shading='gouraud') + plotHandle = plt.pcolormesh(x.values, y.values, field.values, + cmap=colormapDict['colormap'], + norm=colormapDict['norm'], + rasterized=True, shading='gouraud') else: - plotHandle = plt.tricontourf(maskedTriangulation, fieldMasked, - cmap=colormapDict['colormap'], - norm=colormapDict['norm'], - levels=colormapDict['levels'], - extend='both') + plotHandle = plt.contourf(x.values, y.values, field.values, + cmap=colormapDict['colormap'], + norm=colormapDict['norm'], + levels=colormapDict['levels'], + extend='both') cbar = plt.colorbar(plotHandle, orientation='vertical', @@ -960,21 +869,16 @@ def plot_vertical_section( else: # display a white heatmap to get a white background for non-land zeroArray = xr.zeros_like(field) - plt.tricontourf(maskedTriangulation, zeroArray.values.ravel(), - colors='white') + 
plt.contourf(x.values, y.values, zeroArray.values, colors='white') ax = plt.gca() ax.set_facecolor(backgroundColor) if outlineValid: - if xOutline is not None and zOutline is not None: - # also outline the domain if provided - plt.plot(xOutline, zOutline, color='black', linewidth=1) - else: - # do a contour to outline the boundary between valid and invalid - # values - landMask = np.isnan(field.values).ravel() - plt.tricontour(unmaskedTriangulation, landMask, levels=[0.0001], - colors='black', linewidths=1) + # do a contour to outline the boundary between valid and invalid + # values + landMask = np.isnan(field.values) + plt.contour(x.values, y.values, landMask, levels=[0.0001], + colors='black', linewidths=1) # plot contours, if they were requested contourLevels = colormapDict['contours'] @@ -983,19 +887,19 @@ def plot_vertical_section( cs2 = None plotLegend = False - if contourLevels is not None and maskedTriangulation is not None: + if contourLevels is not None and anythingToPlot: if len(contourLevels) == 0: # automatic calculation of contour levels contourLevels = None - mask = field.notnull() - fieldMasked = field.where(mask, 0.0).values.ravel() - - cs1 = plt.tricontour(maskedTriangulation, fieldMasked, - levels=contourLevels, - colors=lineColor, - linestyles=lineStyle, - linewidths=lineWidth, - cmap=contourColormap) + + cs1 = plt.contour(x.values, + y.values, + field.values, + levels=contourLevels, + colors=lineColor, + linestyles=lineStyle, + linewidths=lineWidth, + cmap=contourColormap) if labelContours: fmt_string = "%%1.%df" % int(contourLabelPrecision) plt.clabel(cs1, fmt=fmt_string) @@ -1004,23 +908,23 @@ def plot_vertical_section( if comparisonContourLineWidth is None: comparisonContourLineWidth = lineWidth mask = contourComparisonField.notnull() - fieldMasked = contourComparisonField.where(mask, 0.0).values.ravel() - cs2 = plt.tricontour(maskedComparisonTriangulation, - fieldMasked, - levels=contourLevels, - colors=comparisonContourLineColor, - 
linestyles=comparisonContourLineStyle, - linewidths=comparisonContourLineWidth, - cmap=contourColormap) + cs2 = plt.contour(x.values, + y.values, + contourComparisonField.values, + levels=contourLevels, + colors=comparisonContourLineColor, + linestyles=comparisonContourLineStyle, + linewidths=comparisonContourLineWidth, + cmap=contourColormap) if labelContours: plt.clabel(cs2, fmt=fmt_string) plotLegend = (((lineColor is not None and comparisonContourLineColor is not None) or - (lineWidth is not None and + (lineWidth is not None and comparisonContourLineWidth is not None)) and - (plotAsContours and contourComparisonField is not None)) + (plotAsContours and contourComparisonField is not None)) if plotLegend: h1, _ = cs1.legend_elements() @@ -1098,7 +1002,8 @@ def plot_vertical_section( xticks = None if numUpperTicks is not None: xticks = np.linspace(xlimits[0], xlimits[1], numUpperTicks) - tickValues = np.interp(xticks, xCoords[0].values, xCoords[1].values) + tickValues = np.interp(xticks, xCoords[0].values, + xCoords[1].values) ax2.set_xticks(xticks) formatString = "{{0:.{:d}f}}{}".format( upperXAxisTickLabelPrecision, r'$\degree$') @@ -1120,49 +1025,3 @@ def plot_vertical_section( ax3.spines['top'].set_position(('outward', 36)) return fig, ax - - -def _get_triangulation(x, y, mask): - """divide each quad in the x/y mesh into 2 triangles""" - - nx = x.sizes[x.dims[0]] - 1 - ny = x.sizes[x.dims[1]] - 1 - nTriangles = 2 * nx * ny - - mask = mask.values - mask = np.logical_and(np.logical_and(mask[0:-1, 0:-1], mask[1:, 0:-1]), - np.logical_and(mask[0:-1, 1:], mask[1:, 1:])) - triMask = np.zeros((nx, ny, 2), bool) - triMask[:, :, 0] = np.logical_not(mask) - triMask[:, :, 1] = triMask[:, :, 0] - - triMask = triMask.ravel() - - xIndices, yIndices = np.meshgrid(np.arange(nx), np.arange(ny), - indexing='ij') - - tris = np.zeros((nx, ny, 2, 3), int) - # upper triangles: - tris[:, :, 0, 0] = (ny + 1) * xIndices + yIndices - tris[:, :, 0, 1] = (ny + 1) * (xIndices + 1) + 
yIndices - tris[:, :, 0, 2] = (ny + 1) * xIndices + yIndices + 1 - # lower triangle - tris[:, :, 1, 0] = (ny + 1) * xIndices + yIndices + 1 - tris[:, :, 1, 1] = (ny + 1) * (xIndices + 1) + yIndices - tris[:, :, 1, 2] = (ny + 1) * (xIndices + 1) + yIndices + 1 - - tris = tris.reshape((nTriangles, 3)) - - x = x.values.ravel() - y = y.values.ravel() - - anythingToPlot = not np.all(triMask) - if anythingToPlot: - maskedTriangulation = Triangulation(x=x, y=y, triangles=tris, - mask=triMask) - else: - maskedTriangulation = None - - unmaskedTriangulation = Triangulation(x=x, y=y, triangles=tris) - - return maskedTriangulation, unmaskedTriangulation diff --git a/mpas_analysis/shared/regions/compute_region_masks.py b/mpas_analysis/shared/regions/compute_region_masks.py index 4128d9a37..a23e6448f 100644 --- a/mpas_analysis/shared/regions/compute_region_masks.py +++ b/mpas_analysis/shared/regions/compute_region_masks.py @@ -31,7 +31,7 @@ def __init__(self, config, conponentName): Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Configuration options conponentName : str diff --git a/mpas_analysis/shared/regions/compute_region_masks_subtask.py b/mpas_analysis/shared/regions/compute_region_masks_subtask.py index b72c1fb45..0fb4e1524 100644 --- a/mpas_analysis/shared/regions/compute_region_masks_subtask.py +++ b/mpas_analysis/shared/regions/compute_region_masks_subtask.py @@ -119,7 +119,7 @@ class ComputeRegionMasksSubtask(AnalysisTask): The name of the output mask file obsFileName : str - The name of an observations file to create masks for. But default, + The name of an observations file to create masks for. 
By default, lon/lat are taken from an MPAS restart file lonVar, latVar : str @@ -268,11 +268,7 @@ def setup_and_check(self): super(ComputeRegionMasksSubtask, self).setup_and_check() if self.useMpasMesh: - try: - self.obsFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS restart file found: need at least one ' - 'restart file to perform region masking.') + self.obsFileName = self.get_mesh_filename() maskSubdirectory = build_config_full_path(self.config, 'output', 'maskSubdirectory') diff --git a/mpas_analysis/shared/time_series/mpas_time_series_task.py b/mpas_analysis/shared/time_series/mpas_time_series_task.py index 9a8e448bb..8fedaad47 100644 --- a/mpas_analysis/shared/time_series/mpas_time_series_task.py +++ b/mpas_analysis/shared/time_series/mpas_time_series_task.py @@ -59,7 +59,7 @@ def __init__(self, config, componentName, taskName=None, Parameters ---------- - config : mpas_tools.config.MpasConfigParser + config : tranche.Tranche Contains configuration options componentName : {'ocean', 'seaIce'} diff --git a/mpas_analysis/shared/transects/compute_transect_masks_subtask.py b/mpas_analysis/shared/transects/compute_transect_masks_subtask.py index 44d75e7ec..63eb618e4 100644 --- a/mpas_analysis/shared/transects/compute_transect_masks_subtask.py +++ b/mpas_analysis/shared/transects/compute_transect_masks_subtask.py @@ -193,11 +193,7 @@ def setup_and_check(self): # self.calendar super(ComputeTransectMasksSubtask, self).setup_and_check() - try: - self.obsFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS restart file found: need at least one ' - 'restart file to perform region masking.') + self.obsFileName = self.get_mesh_filename() self.maskSubdirectory = build_config_full_path(self.config, 'output', 'maskSubdirectory') diff --git a/mpas_analysis/test/test_analysis_task.py b/mpas_analysis/test/test_analysis_task.py index 2080248e9..4673b7df0 100644 --- 
a/mpas_analysis/test/test_analysis_task.py +++ b/mpas_analysis/test/test_analysis_task.py @@ -16,7 +16,7 @@ import pytest -from mpas_tools.config import MpasConfigParser +from tranche import Tranche from mpas_analysis.test import TestCase from mpas_analysis.shared.analysis_task import AnalysisTask @@ -27,7 +27,7 @@ class TestAnalysisTask(TestCase): def test_checkGenerate(self): def doTest(generate, expectedResults): - config = MpasConfigParser() + config = Tranche() config.set('output', 'generate', generate) for taskName in expectedResults: genericTask = AnalysisTask(config=config, diff --git a/mpas_analysis/test/test_climatology.py b/mpas_analysis/test/test_climatology.py index 2b6e2ce11..aa6d88486 100644 --- a/mpas_analysis/test/test_climatology.py +++ b/mpas_analysis/test/test_climatology.py @@ -23,7 +23,7 @@ import xarray from pyremap import MpasCellMeshDescriptor, LatLonGridDescriptor -from mpas_tools.config import MpasConfigParser +from tranche import Tranche from mpas_analysis.test import TestCase, loaddatadir from mpas_analysis.shared.generalized_reader.generalized_reader \ @@ -47,7 +47,7 @@ def tearDown(self): shutil.rmtree(self.test_dir) def setup_config(self, maxChunkSize=10000): - config = MpasConfigParser() + config = Tranche() config.set('execute', 'mapParallelExec', 'None') config.set('execute', 'mapMpiTasks', '1') @@ -84,7 +84,7 @@ def setup_mpas_remapper(self, config): get_comparison_descriptor(config, comparison_grid_name='latlon') mpasDescriptor = MpasCellMeshDescriptor( - mpasMeshFileName, meshName=config.get('input', 'mpasMeshName')) + mpasMeshFileName, mesh_name=config.get('input', 'mpasMeshName')) remapper = get_remapper( config=config, sourceDescriptor=mpasDescriptor, @@ -100,9 +100,9 @@ def setup_obs_remapper(self, config, fieldName): comparisonDescriptor = \ get_comparison_descriptor(config, comparison_grid_name='latlon') - obsDescriptor = LatLonGridDescriptor.read(fileName=gridFileName, - latVarName='lat', - lonVarName='lon') + 
obsDescriptor = LatLonGridDescriptor.read(filename=gridFileName, + lat_var_name='lat', + lon_var_name='lon') remapper = \ get_remapper( @@ -151,12 +151,12 @@ def test_get_mpas_remapper(self): remapper = self.setup_mpas_remapper(config) assert (os.path.abspath(mappingFileName) == - os.path.abspath(remapper.mappingFileName)) + os.path.abspath(remapper.map_filename)) assert os.path.exists(mappingFileName) - assert isinstance(remapper.sourceDescriptor, + assert isinstance(remapper.src_descriptor, MpasCellMeshDescriptor) - assert isinstance(remapper.destinationDescriptor, + assert isinstance(remapper.dst_descriptor, LatLonGridDescriptor) if not setName: @@ -182,12 +182,12 @@ def test_get_observations_remapper(self): remapper = self.setup_obs_remapper(config, fieldName) assert (os.path.abspath(mappingFileName) == - os.path.abspath(remapper.mappingFileName)) + os.path.abspath(remapper.map_filename)) assert os.path.exists(mappingFileName) - assert isinstance(remapper.sourceDescriptor, + assert isinstance(remapper.src_descriptor, LatLonGridDescriptor) - assert isinstance(remapper.destinationDescriptor, + assert isinstance(remapper.dst_descriptor, LatLonGridDescriptor) if not setName: diff --git a/mpas_analysis/test/test_generalized_reader.py b/mpas_analysis/test/test_generalized_reader.py index b5a35dc7a..1c80adbf9 100644 --- a/mpas_analysis/test/test_generalized_reader.py +++ b/mpas_analysis/test/test_generalized_reader.py @@ -18,7 +18,7 @@ import numpy import pytest -from mpas_tools.config import MpasConfigParser +from tranche import Tranche from mpas_analysis.test import TestCase, loaddatadir from mpas_analysis.shared.generalized_reader.generalized_reader \ @@ -29,7 +29,7 @@ class TestGeneralizedReader(TestCase): def setup_config(self, maxChunkSize=10000): - config = MpasConfigParser() + config = Tranche() config.set('input', 'maxChunkSize', str(maxChunkSize)) return config diff --git a/mpas_analysis/test/test_mpas_climatology_task.py 
b/mpas_analysis/test/test_mpas_climatology_task.py index d502666e8..d0e57d3f6 100644 --- a/mpas_analysis/test/test_mpas_climatology_task.py +++ b/mpas_analysis/test/test_mpas_climatology_task.py @@ -19,7 +19,7 @@ import shutil import os -from mpas_tools.config import MpasConfigParser +from tranche import Tranche from mpas_analysis.test import TestCase, loaddatadir from mpas_analysis.shared.climatology import MpasClimatologyTask, \ @@ -44,7 +44,7 @@ def tearDown(self): def setup_config(self): configPath = self.datadir.join('QU240.cfg') - config = MpasConfigParser() + config = Tranche() config.add_from_file(str(configPath)) config.set('input', 'baseDirectory', str(self.datadir)) config.set('output', 'baseDirectory', str(self.test_dir)) diff --git a/mpas_analysis/test/test_mpas_climatology_task/QU240.cfg b/mpas_analysis/test/test_mpas_climatology_task/QU240.cfg index aa6607e78..485a014ad 100644 --- a/mpas_analysis/test/test_mpas_climatology_task/QU240.cfg +++ b/mpas_analysis/test/test_mpas_climatology_task/QU240.cfg @@ -37,3 +37,6 @@ mpasInterpolationMethod = bilinear useNcclimo = True useNcremap = True renormalizationThreshold = 0.01 + +[ocean] +meshStream = mesh diff --git a/mpas_analysis/test/test_mpas_climatology_task/streams.ocean b/mpas_analysis/test/test_mpas_climatology_task/streams.ocean index 1a025681b..392f0b731 100644 --- a/mpas_analysis/test/test_mpas_climatology_task/streams.ocean +++ b/mpas_analysis/test/test_mpas_climatology_task/streams.ocean @@ -3,7 +3,7 @@ 0. 
ds_out['meltRate'] = ds_out.meltRate.where(mask) ds_out.meltRate.attrs = melt_attrs diff --git a/preprocess_observations/preprocess_paolo_melt.py b/preprocess_observations/preprocess_paolo_melt.py index 91bca7475..4842dea66 100755 --- a/preprocess_observations/preprocess_paolo_melt.py +++ b/preprocess_observations/preprocess_paolo_melt.py @@ -158,13 +158,16 @@ def remap_paolo(in_filename, out_prefix, date, task_count=128): map_filename = f'map_{in_grid_name}_to_{out_grid_name}_{method}.nc' - remapper = Remapper(in_descriptor, out_descriptor, map_filename) + remapper = Remapper( + ntasks=task_count, map_filename=map_filename, method=method) + remapper.src_descriptor = in_descriptor + remapper.dst_descriptor = out_descriptor + remapper.parallel_exec = 'srun' if not os.path.exists(map_filename): - remapper.build_mapping_file(method=method, mpiTasks=task_count, - esmf_parallel_exec='srun') + remapper.build_map() - ds_out = remapper.remap(ds) + ds_out = remapper.remap_numpy(ds) mask = ds_out.meltMask > 0. 
ds_out['meltRate'] = ds_out.meltRate.where(mask) ds_out.meltRate.attrs = melt_attrs diff --git a/preprocess_observations/remap_rignot.py b/preprocess_observations/remap_rignot.py index 952de79e0..e2a53269a 100644 --- a/preprocess_observations/remap_rignot.py +++ b/preprocess_observations/remap_rignot.py @@ -15,8 +15,7 @@ import pyproj import sys -from mpas_analysis.shared.interpolation import Remapper -from mpas_analysis.shared.grid import ProjectionGridDescriptor +from pyremap import ProjectionGridDescriptor, Remapper from mpas_analysis.shared.mpas_xarray.mpas_xarray import subset_variables from mpas_analysis.shared.climatology \ import get_Antarctic_stereographic_comparison_descriptor @@ -50,21 +49,22 @@ inDescriptor = ProjectionGridDescriptor(projection) -inDescriptor.read(inFileName, xVarName='xaxis', yVarName='yaxis', - meshName=inGridName) +inDescriptor.read(inFileName, x_var_name='xaxis', y_var_name='yaxis', + mesh_name=inGridName) outDescriptor = get_Antarctic_stereographic_comparison_descriptor(config) -outGridName = outDescriptor.meshName +outGridName = outDescriptor.mesh_name outFileName = 'Rignot_2013_melt_rates_{}.nc'.format(outGridName) mappingFileName = 'map_{}_to_{}.nc'.format(inGridName, outGridName) -remapper = Remapper(inDescriptor, outDescriptor, mappingFileName) +remapper = Remapper(map_filename=mappingFileName, method='bilinear') +remapper.src_descriptor = inDescriptor +remapper.dst_descriptor = outDescriptor +remapper.build_map() -remapper.build_mapping_file(method='bilinear') - -remappedDataset = remapper.remap(ds, renormalizationThreshold=0.01) +remappedDataset = remapper.remap_numpy(ds, renormalizationThreshold=0.01) remappedDataset.attrs['history'] = ' '.join(sys.argv) remappedDataset.to_netcdf(outFileName) diff --git a/pyproject.toml b/pyproject.toml index d3d7f0d25..cd7d33ae8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,8 @@ [project] -name = "mpas_analysis" +name = "mpas-analysis" dynamic = ["version"] authors = [ - { 
name="Xylar Asay-Davis", email="xylar@lanl.gov" }, + { name="Xylar Asay-Davis", email="xylarstorm@gmail.com" }, { name="Carolyn Begeman" }, { name="Phillip J. Wolfram" }, { name="Milena Veneziani" }, @@ -27,10 +27,9 @@ description = """\ """ license = { file = "LICENSE" } readme = "README.md" -requires-python = ">=3.9" +requires-python = ">=3.10" classifiers = [ # these are only for searching/browsing projects on PyPI - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -45,7 +44,7 @@ classifiers = [ ] dependencies = [ - "cartopy>=0.18.0", + "cartopy >=0.18.0", "cmocean", "dask", "gsw", @@ -59,11 +58,27 @@ dependencies = [ "pyproj", "python-dateutil", "requests", - "scipy>=1.7.0", - "setuptools", - "shapely>=2.0,<3.0", - "six", - "xarray>=0.14.1" + "scipy >=1.7.0", + "shapely >=2.0,<3.0", + "tranche >=0.2.3", + "xarray >=0.14.1" +] + +[project.optional-dependencies] +docs = [ + # building documentation + "mock", + "m2r2>=0.3.3", + "mistune<2", + "sphinx", + "sphinx_rtd_theme", + "tabulate", +] + +dev = [ + # linting and testing + "pip", + "pytest", ] [build-system] diff --git a/suite/main.cfg b/suite/main.cfg new file mode 100644 index 000000000..f66fdd462 --- /dev/null +++ b/suite/main.cfg @@ -0,0 +1,33 @@ +[climatologyMapCustom] +## options related to plotting climatology maps of any field at various depths +## (if they include a depth dimension) without observatons for comparison + +# a list of fields top plot for each depth slice. 
All supported fields are +# listed above +variables = [ + 'temperature', + 'salinity', + 'potentialDensity', + 'thermalForcing', + 'zonalVelocity', + 'meridionalVelocity', + 'velocityMagnitude', + 'vertVelocity', + 'vertDiff', + 'vertVisc', + 'mixedLayerDepth' + ] + +[climatologyMapMassFluxes] + +# excluding icebergFreshWaterFlux, which is not present in the test simulation +# output +variables = [ + 'riverRunoffFlux', + 'iceRunoffFlux', + 'snowFlux', + 'rainFlux', + 'evaporationFlux', + 'seaIceFreshWaterFlux', + 'landIceFreshwaterFlux' + ] diff --git a/suite/run_dev_suite.bash b/suite/run_dev_suite.bash index 0dc955bb7..f3db1df68 100755 --- a/suite/run_dev_suite.bash +++ b/suite/run_dev_suite.bash @@ -2,7 +2,7 @@ set -e -env_name=mpas_dev +env_name=mpas_analysis_dev conda_base=$(dirname $(dirname $CONDA_EXE)) source $conda_base/etc/profile.d/conda.sh @@ -14,13 +14,12 @@ branch=$(git symbolic-ref --short HEAD) # test building the docs conda activate ${env_name} cd docs -make clean -make html +DOCS_VERSION=test make clean versioned-html cd .. 
machine=$(python -c "from mache import discover_machine; print(discover_machine())") -py=3.11 +py=3.13 ./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --copy_docs --clean -e ${env_name} ./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions -e ${env_name} ./suite/setup.py -p ${py} -r moc_am -b ${branch} -e ${env_name} @@ -34,7 +33,7 @@ py=3.11 # submit the jobs cd ${machine}_test_suite -main_py=3.11 +main_py=3.13 cd main_py${main_py} echo main_py${main_py} RES=$(sbatch job_script.bash) diff --git a/suite/run_e3sm_unified_suite.bash b/suite/run_e3sm_unified_suite.bash index 994bc43d9..b060ede13 100755 --- a/suite/run_e3sm_unified_suite.bash +++ b/suite/run_e3sm_unified_suite.bash @@ -6,7 +6,7 @@ set -e branch=test_e3sm_unified # test building the docs -py=3.10 +py=3.13 machine=${E3SMU_MACHINE} ./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --clean diff --git a/suite/run_suite.bash b/suite/run_suite.bash index 70114596a..ee8dd0b16 100755 --- a/suite/run_suite.bash +++ b/suite/run_suite.bash @@ -5,8 +5,8 @@ set -e conda_base=$(dirname $(dirname $CONDA_EXE)) source $conda_base/etc/profile.d/conda.sh -main_py=3.11 -alt_py=3.10 +main_py=3.13 +alt_py=3.12 export HDF5_USE_FILE_LOCKING=FALSE @@ -41,8 +41,7 @@ conda deactivate py=${main_py} conda activate test_mpas_analysis_py${py} cd docs -make clean -make html +DOCS_VERSION=test make clean versioned-html cd .. machine=$(python -c "from mache import discover_machine; print(discover_machine())") diff --git a/suite/setup.py b/suite/setup.py index dbab86a75..df41ce0e2 100755 --- a/suite/setup.py +++ b/suite/setup.py @@ -146,6 +146,10 @@ def main(): [config_from_job, os.path.join('..', '..', 'suite', f'{args.run}.cfg')]) + if args.run.startswith('main_py'): + config_from_job = ' '.join( + [config_from_job, os.path.join('..', '..', 'suite', 'main.cfg')]) + if args.run not in ['main', 'ctrl']: try: os.makedirs(os.path.join(suite_path, args.run))