From 6bdb34a06df86a9326b1db81fdc5f20c607be8b9 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 7 Sep 2020 15:28:42 +0200 Subject: [PATCH 01/58] Update tutorial to include HDF5_USE_FILE_LOCKING=FALSE --- docs/tutorials/getting_started.rst | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/docs/tutorials/getting_started.rst b/docs/tutorials/getting_started.rst index d9318160e..c9cf65ef7 100644 --- a/docs/tutorials/getting_started.rst +++ b/docs/tutorials/getting_started.rst @@ -467,10 +467,25 @@ years (3 to 5) and displaying time series and the El NiƱo index over the full 5 Running MPAS-Analysis ----------------------- -The hard work is done. Now that we have a config file, we are ready to run: +The hard work is done. Now that we have a config file, we are ready to run. + +On many file systems, MPAS-Analysis and other python-based software that used +NetCDF files based on the HDF5 file structure can experience file access errors +unless the following environment variable is set as follows in bash: .. code-block:: bash + $ export HDF5_USE_FILE_LOCKING=FALSE + +or under csh: + +.. code-block:: csh + + > setenv HDF5_USE_FILE_LOCKING FALSE + +Then, running MPAS-Analysis is as simple as: + +.. code-block:: bash $ mpas_analysis myrun.cfg Typical output is the analysis is running correctly looks something like: From 33fc5f7b9649369661bd13f15952a27bc324ce91 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 11:28:33 +0200 Subject: [PATCH 02/58] Close log files after tasks finish Without this fix, there are warnings about unclosed files. 
--- mpas_analysis/shared/analysis_task.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index a6a3af91f..6144b307e 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -342,6 +342,7 @@ def run(self, writeLogFile=True): # {{{ self.logger.info('Execution time: {}:{:02d}:{:05.2f}'.format(h, m, s)) if writeLogFile: + handler.close() # restore stdout and stderr sys.stdout = oldStdout sys.stderr = oldStderr From df084ce1c6629d96d29b56b6ced2240843bafc74 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 12:43:27 +0200 Subject: [PATCH 03/58] Fix incorrect spacing in tutorial --- docs/tutorials/getting_started.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/tutorials/getting_started.rst b/docs/tutorials/getting_started.rst index c9cf65ef7..65d14fdd6 100644 --- a/docs/tutorials/getting_started.rst +++ b/docs/tutorials/getting_started.rst @@ -486,6 +486,7 @@ or under csh: Then, running MPAS-Analysis is as simple as: .. 
code-block:: bash + $ mpas_analysis myrun.cfg Typical output is the analysis is running correctly looks something like: From aa41b416a12c9acd1f390e0d83d02e91f7f727c5 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 13:39:55 +0200 Subject: [PATCH 04/58] Remove unused imports --- mpas_analysis/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/mpas_analysis/__init__.py b/mpas_analysis/__init__.py index 657a4e651..92f4d2f13 100644 --- a/mpas_analysis/__init__.py +++ b/mpas_analysis/__init__.py @@ -1,7 +1,5 @@ # make sure to set the Agg backend that works even without x-forwarding # before any other matplotlib imports -import sys -import warnings import matplotlib as mpl mpl.use('Agg') From 1ca28b306562daa3d2d62bfd360309ea9e922438 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 13:40:12 +0200 Subject: [PATCH 05/58] Update depricated progressbar argument maxval --> max_value --- mpas_analysis/__main__.py | 2 +- mpas_analysis/shared/io/download.py | 2 +- mpas_analysis/shared/regions/compute_region_masks_subtask.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index 037cdcec0..34f4187f3 100755 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -493,7 +493,7 @@ def run_analysis(config, analyses): # {{{ widgets = ['Running tasks: ', progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()] progress = progressbar.ProgressBar(widgets=widgets, - maxval=totalTaskCount).start() + max_value=totalTaskCount).start() runningProcessCount = 0 diff --git a/mpas_analysis/shared/io/download.py b/mpas_analysis/shared/io/download.py index 47a967f7c..0da45a91d 100644 --- a/mpas_analysis/shared/io/download.py +++ b/mpas_analysis/shared/io/download.py @@ -102,7 +102,7 @@ def download_files(fileList, urlBase, outDir, verify=True): widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()] bar = 
progressbar.ProgressBar(widgets=widgets, - maxval=totalSize).start() + max_value=totalSize).start() size = 0 with open(outFileName, 'wb') as f: try: diff --git a/mpas_analysis/shared/regions/compute_region_masks_subtask.py b/mpas_analysis/shared/regions/compute_region_masks_subtask.py index 65bd5220d..1230625bf 100644 --- a/mpas_analysis/shared/regions/compute_region_masks_subtask.py +++ b/mpas_analysis/shared/regions/compute_region_masks_subtask.py @@ -249,7 +249,7 @@ def compute_region_masks(geojsonFileName, cellPoints, maskFileName, widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar(), ' ', progressbar.ETA()] bar = progressbar.ProgressBar(widgets=widgets, - maxval=nChunks).start() + max_value=nChunks).start() mask = numpy.zeros((nCells,), bool) for iChunk, maskChunk in \ From d5f41ed6c35aee662bbe610c535f9a3162289320 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 13:42:48 +0200 Subject: [PATCH 06/58] Fix depricated ConfigParser method readfp --> read_file --- .../shared/climatology/ref_year_mpas_climatology_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mpas_analysis/shared/climatology/ref_year_mpas_climatology_task.py b/mpas_analysis/shared/climatology/ref_year_mpas_climatology_task.py index b5db4074b..cc9d7aa3b 100644 --- a/mpas_analysis/shared/climatology/ref_year_mpas_climatology_task.py +++ b/mpas_analysis/shared/climatology/ref_year_mpas_climatology_task.py @@ -68,7 +68,7 @@ def __init__(self, config, componentName, taskName=None): # {{{ # We must reset the buffer to make it ready for reading. 
config_string.seek(0) new_config = MpasAnalysisConfigParser() - new_config.readfp(config_string) + new_config.read_file(config_string) # call the constructor from the base class (AnalysisTask) super(RefYearMpasClimatologyTask, self).__init__( From 7f27aab492f4caaa8671711624059445778aa81b Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 13:44:32 +0200 Subject: [PATCH 07/58] Fix depricated numpy array method for getting string tostring --> tobytes --- mpas_analysis/shared/timekeeping/utility.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mpas_analysis/shared/timekeeping/utility.py b/mpas_analysis/shared/timekeeping/utility.py index c7bf8dc37..172637c48 100644 --- a/mpas_analysis/shared/timekeeping/utility.py +++ b/mpas_analysis/shared/timekeeping/utility.py @@ -63,9 +63,9 @@ def get_simulation_start_time(streams): ds = xarray.open_dataset(restartFile) da = ds.simulationStartTime if da.dtype.type is numpy.string_: - simulationStartTime = bytes.decode(da.values.tostring()) + simulationStartTime = bytes.decode(da.values.tobytes()) else: - simulationStartTime = da.values.tostring() + simulationStartTime = da.values.tobytes() # replace underscores so it works as a CF-compliant reference date simulationStartTime = simulationStartTime.rstrip('\x00').replace('_', ' ') From 112a985f132b91941a9c43d3a2a8acc28916e607 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 13:50:56 +0200 Subject: [PATCH 08/58] Fix depricated method drop --> drop_vars... in xarray Datasets. 
--- mpas_analysis/ocean/climatology_map_argo.py | 6 +++--- mpas_analysis/ocean/climatology_map_bgc.py | 8 ++++---- mpas_analysis/ocean/climatology_map_eke.py | 8 ++++---- mpas_analysis/ocean/climatology_map_ohc_anomaly.py | 2 +- mpas_analysis/ocean/climatology_map_ssh.py | 2 +- mpas_analysis/ocean/climatology_map_woa.py | 4 ++-- mpas_analysis/ocean/compute_transects_subtask.py | 4 ++-- .../ocean/plot_depth_integrated_time_series_subtask.py | 2 +- mpas_analysis/ocean/remap_depth_slices_subtask.py | 2 +- mpas_analysis/ocean/remap_sose_climatology.py | 4 ++-- mpas_analysis/ocean/sose_transects.py | 2 +- 11 files changed, 22 insertions(+), 22 deletions(-) diff --git a/mpas_analysis/ocean/climatology_map_argo.py b/mpas_analysis/ocean/climatology_map_argo.py index 0b6f246cd..f68267d3c 100644 --- a/mpas_analysis/ocean/climatology_map_argo.py +++ b/mpas_analysis/ocean/climatology_map_argo.py @@ -443,11 +443,11 @@ def build_observational_dataset(self, fileName): # {{{ field = dsObs[self.fieldName] for depth in self.depths: if depth == 'top': - slices.append(field.sel(method='nearest', depth=0.).drop( + slices.append(field.sel(method='nearest', depth=0.).drop_vars( 'depth')) else: - slices.append(field.sel(method='nearest', depth=depth).drop( - 'depth')) + slices.append( + field.sel(method='nearest', depth=depth).drop_vars('depth')) depthNames = [str(depth) for depth in self.depths] field = xr.concat(slices, dim='depthSlice') diff --git a/mpas_analysis/ocean/climatology_map_bgc.py b/mpas_analysis/ocean/climatology_map_bgc.py index 9a666412c..7e9b2f8af 100644 --- a/mpas_analysis/ocean/climatology_map_bgc.py +++ b/mpas_analysis/ocean/climatology_map_bgc.py @@ -276,10 +276,10 @@ def customize_masked_climatology(self, climatology, season): # {{{ climatology['Chl'] = spChl + diatChl + diazChl + phaeoChl climatology.Chl.attrs['units'] = 'mg m$^{-3}$' climatology.Chl.attrs['description'] = 'Sum of all PFT chlorophyll' - climatology.drop(['timeMonthly_avg_ecosysTracers_spChl', - 
'timeMonthly_avg_ecosysTracers_diatChl', - 'timeMonthly_avg_ecosysTracers_diazChl', - 'timeMonthly_avg_ecosysTracers_phaeoChl']) + climatology.drop_vars(['timeMonthly_avg_ecosysTracers_spChl', + 'timeMonthly_avg_ecosysTracers_diatChl', + 'timeMonthly_avg_ecosysTracers_diazChl', + 'timeMonthly_avg_ecosysTracers_phaeoChl']) return climatology # }}} diff --git a/mpas_analysis/ocean/climatology_map_eke.py b/mpas_analysis/ocean/climatology_map_eke.py index f4851709e..b4826c76b 100644 --- a/mpas_analysis/ocean/climatology_map_eke.py +++ b/mpas_analysis/ocean/climatology_map_eke.py @@ -204,10 +204,10 @@ def customize_masked_climatology(self, climatology, season): # {{{ climatology.timeMonthly_avg_velocityMeridional ** 2) # drop unnecessary fields before re-mapping - climatology.drop(['timeMonthly_avg_velocityZonal', - 'timeMonthly_avg_velocityMeridional', - 'timeMonthly_avg_velocityZonalSquared', - 'timeMonthly_avg_velocityMeridionalSquared']) + climatology.drop_vars(['timeMonthly_avg_velocityZonal', + 'timeMonthly_avg_velocityMeridional', + 'timeMonthly_avg_velocityZonalSquared', + 'timeMonthly_avg_velocityMeridionalSquared']) # this creates a new variable eke in climatology (like netcdf) climatology['eke'] = eke diff --git a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py index 2438a04cb..598508c67 100644 --- a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py +++ b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py @@ -328,7 +328,7 @@ def customize_masked_climatology(self, climatology, season): # {{{ 'Anomaly from year {} in ocean heat content'.format( self.refYearClimatolgyTask.startYear) - climatology = climatology.drop(self.variableList) + climatology = climatology.drop_vars(self.variableList) return climatology # }}} diff --git a/mpas_analysis/ocean/climatology_map_ssh.py b/mpas_analysis/ocean/climatology_map_ssh.py index 7d20a3517..297ba2d7f 100644 --- a/mpas_analysis/ocean/climatology_map_ssh.py +++ 
b/mpas_analysis/ocean/climatology_map_ssh.py @@ -249,7 +249,7 @@ def build_observational_dataset(self, fileName): # {{{ dsObs = dsObs.rename({'time': 'Time'}) dsObs.coords['month'] = dsObs['Time.month'] dsObs.coords['year'] = dsObs['Time.year'] - dsObs = dsObs.drop(['Time', 'time_bnds']) + dsObs = dsObs.drop_vars(['Time', 'time_bnds']) # scale the field to cm from m dsObs['zos'] = constants.cm_per_m * dsObs['zos'] diff --git a/mpas_analysis/ocean/climatology_map_woa.py b/mpas_analysis/ocean/climatology_map_woa.py index 7178feaa0..2bfbd4880 100644 --- a/mpas_analysis/ocean/climatology_map_woa.py +++ b/mpas_analysis/ocean/climatology_map_woa.py @@ -386,10 +386,10 @@ def build_observational_dataset(self, fileName): # {{{ for depth in self.depths: if depth == 'top': slices.append(field.sel(method='nearest', - depth=0.).drop('depth')) + depth=0.).drop_vars('depth')) else: slices.append(field.sel(method='nearest', - depth=-depth).drop('depth')) + depth=-depth).drop_vars('depth')) depthNames = [str(depth) for depth in self.depths] field = xr.concat(slices, dim='depthSlice') diff --git a/mpas_analysis/ocean/compute_transects_subtask.py b/mpas_analysis/ocean/compute_transects_subtask.py index cdc78f29f..fd299856e 100644 --- a/mpas_analysis/ocean/compute_transects_subtask.py +++ b/mpas_analysis/ocean/compute_transects_subtask.py @@ -400,7 +400,7 @@ def _vertical_interp(self, ds, transectIndex, dsObs, outFileName, dsObs = dsObs.rename({'nzOut': 'nz'}) write_netcdf(dsObs, outObsFileName) - ds = ds.drop(['validMask', 'transectNumber']) + ds = ds.drop_vars(['validMask', 'transectNumber']) write_netcdf(ds, outFileName) # }}} # }}} @@ -638,7 +638,7 @@ def _subdivide_observations(self, dsObs): # {{{ dsObs = interp_1d(dsObs, inInterpDim='nPoints', inInterpCoord='xIn', outInterpDim='nPointsOut', outInterpCoord='xOut') - dsObs = dsObs.drop(['xIn']) + dsObs = dsObs.drop_vars(['xIn']) dsObs = dsObs.rename({'nPointsOut': 'nPoints', 'xOut': 'x'}) return dsObs # }}} diff --git 
a/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.py b/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.py index 2e2c5808b..23340149f 100644 --- a/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.py +++ b/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.py @@ -382,7 +382,7 @@ def run_task(self): # {{{ # rolling mean seems to have trouble with dask data sets so we # write out the data set and read it back as a single-file data set # (without dask) - dsPreprocessed = dsPreprocessed.drop('xtime') + dsPreprocessed = dsPreprocessed.drop_vars('xtime') write_netcdf(dsPreprocessed, self.preprocessedFileName) dsPreprocessed = xarray.open_dataset(self.preprocessedFileName) diff --git a/mpas_analysis/ocean/remap_depth_slices_subtask.py b/mpas_analysis/ocean/remap_depth_slices_subtask.py index 210a3e43e..51603b757 100644 --- a/mpas_analysis/ocean/remap_depth_slices_subtask.py +++ b/mpas_analysis/ocean/remap_depth_slices_subtask.py @@ -219,7 +219,7 @@ def customize_masked_climatology(self, climatology, season): # {{{ climatology[variableName] = \ da.sum(dim='nVertLevels').where(self.verticalIndexMask) - climatology = climatology.drop('verticalIndex') + climatology = climatology.drop_vars('verticalIndex') climatology = climatology.transpose('depthSlice', 'nCells') diff --git a/mpas_analysis/ocean/remap_sose_climatology.py b/mpas_analysis/ocean/remap_sose_climatology.py index 0478f9074..18171bc8d 100644 --- a/mpas_analysis/ocean/remap_sose_climatology.py +++ b/mpas_analysis/ocean/remap_sose_climatology.py @@ -151,12 +151,12 @@ def build_observational_dataset(self, fileName): # {{{ slices = [] for depth in self.depths: if depth == 'top': - slices.append(field.sel(method='nearest', z=0.).drop( + slices.append(field.sel(method='nearest', z=0.).drop_vars( 'z')) elif depth == 'bot': slices.append(dsObs[self.botFieldName]) else: - level = field.sel(method='nearest', z=depth).drop( + level = field.sel(method='nearest', 
z=depth).drop_vars( 'z') slices.append(level) diff --git a/mpas_analysis/ocean/sose_transects.py b/mpas_analysis/ocean/sose_transects.py index 9f764ed48..46d62e5a3 100644 --- a/mpas_analysis/ocean/sose_transects.py +++ b/mpas_analysis/ocean/sose_transects.py @@ -419,7 +419,7 @@ def build_observational_dataset(self, fileName, transectName): # {{{ dsObs['lat'] = dsObs.nPoints dsObs['z'] = dsObs.nz dsObs['lon'] = ('nPoints', lon * numpy.ones(dsObs.sizes['nPoints'])) - dsObs = dsObs.drop(['nPoints', 'nz']) + dsObs = dsObs.drop_vars(['nPoints', 'nz']) return dsObs # }}} From a21060ca53e4d2ee2bf15a73f94ada4947c9b8b5 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 14:01:33 +0200 Subject: [PATCH 09/58] Switch from depricated Dataset.apply to map method --- mpas_analysis/config.default | 2 +- mpas_analysis/shared/interpolation/interp_1d.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 475da1bf4..5b0b7a87b 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -1266,7 +1266,7 @@ regionNames = ['all'] # a list of variables to plot variables = [{'name': 'temperature', 'title': 'Temperature', - 'units': '$^\circ$C', + 'units': r'$^\circ$C', 'mpas': 'timeMonthly_avg_activeTracers_temperature'}, {'name': 'salinity', 'title': 'Salinity', diff --git a/mpas_analysis/shared/interpolation/interp_1d.py b/mpas_analysis/shared/interpolation/interp_1d.py index f812e4719..7cbd8e370 100644 --- a/mpas_analysis/shared/interpolation/interp_1d.py +++ b/mpas_analysis/shared/interpolation/interp_1d.py @@ -44,7 +44,7 @@ def interp_1d(ds, inInterpDim, inInterpCoord, outInterpDim, coords = list(ds.coords) ds = ds.reset_coords(coords) - ds = ds.apply(_interp_1d_array, args=(indices, weight0, inInterpDim)) + ds = ds.map(_interp_1d_array, args=(indices, weight0, inInterpDim)) # conert back to coords ds = ds.set_coords(coords) From 
f27df4cc1b484da72c6a1fa272124db37a43bdc0 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 15:35:02 +0200 Subject: [PATCH 10/58] Git rid of boundaries arg to colorbar Apparently, the contourf call is already determining the boundaries --- mpas_analysis/shared/plot/climatology_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mpas_analysis/shared/plot/climatology_map.py b/mpas_analysis/shared/plot/climatology_map.py index b4c4479d7..838a23878 100644 --- a/mpas_analysis/shared/plot/climatology_map.py +++ b/mpas_analysis/shared/plot/climatology_map.py @@ -160,7 +160,7 @@ def do_subplot(ax, field, title, colormap, norm, levels, ticks, contours, divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.1, axes_class=plt.Axes) - cbar = plt.colorbar(plotHandle, cax=cax, boundaries=levels) + cbar = plt.colorbar(plotHandle, cax=cax) cbar.set_label(cbarlabel) if ticks is not None: cbar.set_ticks(ticks) From 40a07df60e58406fa6995ea99bc44c019ea5b4c5 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 10 Sep 2020 15:54:13 +0200 Subject: [PATCH 11/58] Close taskProgress.log --- mpas_analysis/__main__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index 34f4187f3..709822aba 100755 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -592,6 +592,7 @@ def run_analysis(config, analyses): # {{{ # blank line to make sure remaining output is on a new line print('') + handler.close() logger.handlers = [] # raise the last exception so the process exits with an error From c9187e2b78f00fc7d75fd67d8397115db9b99d70 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 13 Sep 2020 04:07:04 -0700 Subject: [PATCH 12/58] Use only one subprocesss for MPAS-Tools mask creator It does not need to block all tasks because it isn't running in parallel. 
--- mpas_analysis/shared/regions/compute_region_masks.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mpas_analysis/shared/regions/compute_region_masks.py b/mpas_analysis/shared/regions/compute_region_masks.py index 212dcf2b7..e24e84d6d 100644 --- a/mpas_analysis/shared/regions/compute_region_masks.py +++ b/mpas_analysis/shared/regions/compute_region_masks.py @@ -106,6 +106,9 @@ def add_mask_subtask(self, geojsonFileName, outFileSuffix, obsFileName=None, if obsFileName is not None: useMpasMaskCreator = False + if useMpasMaskCreator: + subprocessCount = 1 + maskSubtask = ComputeRegionMasksSubtask( self, geojsonFileName, outFileSuffix, featureList=None, subtaskName=subtaskName, From e17950bada51480ea25289b6c035ebc9f77ae556 Mon Sep 17 00:00:00 2001 From: Milena Veneziani Date: Thu, 1 Oct 2020 12:58:14 -0700 Subject: [PATCH 13/58] Adds global mean and rsme of bias on climo plots --- mpas_analysis/shared/plot/climatology_map.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mpas_analysis/shared/plot/climatology_map.py b/mpas_analysis/shared/plot/climatology_map.py index 838a23878..3ede0a808 100644 --- a/mpas_analysis/shared/plot/climatology_map.py +++ b/mpas_analysis/shared/plot/climatology_map.py @@ -216,6 +216,8 @@ def do_subplot(ax, field, title, colormap, norm, levels, ticks, contours, do_subplot(ax=ax, field=refArray, title=refTitle, **dictModelRef) ax = plt.subplot(subplots[2], projection=projection) + diffTitle = '{} (mean={:5.2f}, rms={:5.2f})'.format( + diffTitle, np.nanmean(diffArray), np.nanstd(diffArray)) do_subplot(ax=ax, field=diffArray, title=diffTitle, **dictDiff) fig.canvas.draw() @@ -386,6 +388,8 @@ def plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, plot_panel(ax, refTitle, refArray, **dictModelRef) ax = plt.subplot(subplots[2], projection=projection) + diffTitle = '{} (mean={:5.2f}, rms={:5.2f})'.format( + diffTitle, np.nanmean(diffArray), np.nanstd(diffArray)) plot_panel(ax, diffTitle, diffArray, **dictDiff) if 
(fileout is not None): From e90139bbddf5b8deefdff5630de9d6fbf96b7b5c Mon Sep 17 00:00:00 2001 From: Milena Veneziani Date: Sat, 3 Oct 2020 00:22:52 -0700 Subject: [PATCH 14/58] Adjusted mean/std values per latitude weights --- mpas_analysis/shared/plot/climatology_map.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/mpas_analysis/shared/plot/climatology_map.py b/mpas_analysis/shared/plot/climatology_map.py index 3ede0a808..faeace0ed 100644 --- a/mpas_analysis/shared/plot/climatology_map.py +++ b/mpas_analysis/shared/plot/climatology_map.py @@ -216,8 +216,6 @@ def do_subplot(ax, field, title, colormap, norm, levels, ticks, contours, do_subplot(ax=ax, field=refArray, title=refTitle, **dictModelRef) ax = plt.subplot(subplots[2], projection=projection) - diffTitle = '{} (mean={:5.2f}, rms={:5.2f})'.format( - diffTitle, np.nanmean(diffArray), np.nanstd(diffArray)) do_subplot(ax=ax, field=diffArray, title=diffTitle, **dictDiff) fig.canvas.draw() @@ -387,9 +385,12 @@ def plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, ax = plt.subplot(subplots[1], projection=projection) plot_panel(ax, refTitle, refArray, **dictModelRef) + weights = np.cos(np.deg2rad(Lats)) + mean = np.average(diffArray, weights=weights) + std = np.sqrt(np.average((diffArray - mean)**2, weights=weights)) + diffTitle = '{} (mean={:5.2f}, std={:5.2f})'.format( + diffTitle, mean, std) ax = plt.subplot(subplots[2], projection=projection) - diffTitle = '{} (mean={:5.2f}, rms={:5.2f})'.format( - diffTitle, np.nanmean(diffArray), np.nanstd(diffArray)) plot_panel(ax, diffTitle, diffArray, **dictDiff) if (fileout is not None): From 8358ac69ae6617048bd032b983dd865a0a767abe Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 4 Oct 2020 11:37:44 +0200 Subject: [PATCH 15/58] Fix masking of ocean ocean regions The mask and time series need to be loaded before masking. 
Otherwise, regions with no cells cause NetCDF problems in xarray 0.16.1 --- mpas_analysis/ocean/time_series_ocean_regions.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index 4435a911d..9a24ea082 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -514,6 +514,7 @@ def run_task(self): # {{{ variableList=variableList, startDate=startDate, endDate=endDate).isel(Time=0) + dsIn.load() layerThickness = dsIn.timeMonthly_avg_layerThickness @@ -522,6 +523,7 @@ def run_task(self): # {{{ self.logger.info(' region: {}'.format( self.regionNames[regionIndex])) dsRegion = dsRegionMask.isel(nRegions=regionIndex) + dsRegion.load() cellMask = dsRegion.cellMask totalArea = dsRegion.totalArea depthMask = dsRegion.depthMask.where(cellMask, drop=True) From ad1f530b6b189e689fd92035a1b1656a3011a3bb Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 4 Oct 2020 12:26:11 +0200 Subject: [PATCH 16/58] Load ds before masking in T-S diagrams --- mpas_analysis/ocean/regional_ts_diagrams.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mpas_analysis/ocean/regional_ts_diagrams.py b/mpas_analysis/ocean/regional_ts_diagrams.py index 05e92666f..693aea586 100644 --- a/mpas_analysis/ocean/regional_ts_diagrams.py +++ b/mpas_analysis/ocean/regional_ts_diagrams.py @@ -672,6 +672,8 @@ def _write_mpas_t_s(self, config): # {{{ ds['volume'] = (dsRestart.areaCell * ds['timeMonthly_avg_layerThickness']) + ds.load() + ds = ds.where(cellMask, drop=True) self.logger.info("Don't worry about the following dask " From 7515c2a647932edd2a8a71cc7d940c502575f85a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 25 Oct 2020 04:45:28 -0500 Subject: [PATCH 17/58] Update example config file and job script for Anvil The old job was big and may be gone (or certainly won't be around long-term). 
The new one is small, cheap, good for testing and will be around awhile. --- ...201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cfg} | 25 ++++++++++++------- configs/anvil/job_script.anvil.bash | 5 ++-- 2 files changed, 18 insertions(+), 12 deletions(-) rename configs/anvil/{config.SO60to10ISC.20200108 => 20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cfg} (91%) diff --git a/configs/anvil/config.SO60to10ISC.20200108 b/configs/anvil/20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cfg similarity index 91% rename from configs/anvil/config.SO60to10ISC.20200108 rename to configs/anvil/20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cfg index 2f8efaf05..e2174088a 100644 --- a/configs/anvil/config.SO60to10ISC.20200108 +++ b/configs/anvil/20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cfg @@ -3,7 +3,7 @@ ## compared against # mainRunName is a name that identifies the simulation being analyzed. -mainRunName = SO60to10ISC.20200108 +mainRunName = GMPAS-IAF.T62_oQU240wLIC # config file for a control run to which this run will be compared. The # analysis should have already been run to completion once with this config @@ -45,17 +45,24 @@ baseDirectory = /lcrc/group/acme/diagnostics ## options related to reading in the results to be analyzed # directory containing model results -baseDirectory = /lcrc/group/acme/kehoch/acme_scratch/anvil/SO60to10ISC.20200108/run +baseDirectory = /lcrc/group/acme/ac.xylar/acme_scratch/anvil/20201024.GMPAS-IAF.T62_oQU240wLIC.anvil # names of ocean and sea ice meshes (e.g. oEC60to30v3, oQU240v3, oRRS30to10v3, etc.) -mpasMeshName = oSO60to10wISC +mpasMeshName = oQU240wLI + +# subdirectory containing restart files +runSubdirectory = run +# subdirectory for ocean history files +oceanHistorySubdirectory = archive/ocn/hist +# subdirectory for sea ice history files +seaIceHistorySubdirectory = archive/ice/hist # names of namelist and streams files, either a path relative to baseDirectory # or an absolute path. 
-oceanNamelistFileName = mpaso_in -oceanStreamsFileName = streams.ocean -seaIceNamelistFileName = mpassi_in -seaIceStreamsFileName = streams.seaice +oceanNamelistFileName = run/mpaso_in +oceanStreamsFileName = run/streams.ocean +seaIceNamelistFileName = run/mpassi_in +seaIceStreamsFileName = run/streams.seaice [output] ## options related to writing out plots, intermediate cached data sets, logs, @@ -101,9 +108,9 @@ generate = ['all', 'no_BGC', 'no_icebergs', 'no_index', 'no_eke', 'no_min', ## observations and previous runs # the first year over which to average climatalogies -startYear = 6 +startYear = 4 # the last year over which to average climatalogies -endYear = 10 +endYear = 8 [timeSeries] ## options related to producing time series plots, often to compare against diff --git a/configs/anvil/job_script.anvil.bash b/configs/anvil/job_script.anvil.bash index c100efe73..11594bd09 100644 --- a/configs/anvil/job_script.anvil.bash +++ b/configs/anvil/job_script.anvil.bash @@ -10,9 +10,8 @@ cd $SLURM_SUBMIT_DIR export OMP_NUM_THREADS=1 -source /home/xylar/miniconda3/etc/profile.d/conda.sh -conda activate mpas-analysis +source /lcrc/soft/climate/e3sm-unified/load_latest_e3sm_unified.sh export HDF5_USE_FILE_LOCKING=FALSE -srun -N 1 -n 1 python -m mpas_analysis configs/polarRegions.conf config.SO60to10ISC.20200108 +srun -N 1 -n 1 python -m mpas_analysis configs/polarRegions.conf 20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cg From 32d919f8bb2948509745a4e5b290136b21d1e0dd Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 25 Oct 2020 16:52:11 -0500 Subject: [PATCH 18/58] Switch to different QU240 run (with land-ice fluxes) --- ...C.anvil.cfg => 20201025.GMPAS-IAF.T62_oQU240wLI.anvil.cfg} | 4 ++-- configs/anvil/job_script.anvil.bash | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) rename configs/anvil/{20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cfg => 20201025.GMPAS-IAF.T62_oQU240wLI.anvil.cfg} (98%) diff --git 
a/configs/anvil/20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cfg b/configs/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil.cfg similarity index 98% rename from configs/anvil/20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cfg rename to configs/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil.cfg index e2174088a..e1f6cc0a3 100644 --- a/configs/anvil/20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cfg +++ b/configs/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil.cfg @@ -3,7 +3,7 @@ ## compared against # mainRunName is a name that identifies the simulation being analyzed. -mainRunName = GMPAS-IAF.T62_oQU240wLIC +mainRunName = GMPAS-IAF.T62_oQU240wLI # config file for a control run to which this run will be compared. The # analysis should have already been run to completion once with this config @@ -45,7 +45,7 @@ baseDirectory = /lcrc/group/acme/diagnostics ## options related to reading in the results to be analyzed # directory containing model results -baseDirectory = /lcrc/group/acme/ac.xylar/acme_scratch/anvil/20201024.GMPAS-IAF.T62_oQU240wLIC.anvil +baseDirectory = /lcrc/group/acme/ac.xylar/acme_scratch/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil # names of ocean and sea ice meshes (e.g. oEC60to30v3, oQU240v3, oRRS30to10v3, etc.) 
mpasMeshName = oQU240wLI diff --git a/configs/anvil/job_script.anvil.bash b/configs/anvil/job_script.anvil.bash index 11594bd09..5c067cdf5 100644 --- a/configs/anvil/job_script.anvil.bash +++ b/configs/anvil/job_script.anvil.bash @@ -13,5 +13,5 @@ export OMP_NUM_THREADS=1 source /lcrc/soft/climate/e3sm-unified/load_latest_e3sm_unified.sh export HDF5_USE_FILE_LOCKING=FALSE -srun -N 1 -n 1 python -m mpas_analysis configs/polarRegions.conf 20201024.GMPAS-IAF.T62_oQU240wLIC.anvil.cg +srun -N 1 -n 1 python -m mpas_analysis configs/polarRegions.conf 20201025.GMPAS-IAF.T62_oQU240wLI.anvil.cg From 4324bbf0b27d920e187b20775436c4b0c67b6b5c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 25 Oct 2020 16:53:58 -0500 Subject: [PATCH 19/58] Add a test suite for MPAS-Analysis on Anvil --- configs/anvil/test_suite/QU480.cfg | 149 +++++++++++++++++ configs/anvil/test_suite/ctrl.cfg | 149 +++++++++++++++++ configs/anvil/test_suite/job_script.bash | 25 +++ .../job_script_no_polar_regions.bash | 21 +++ configs/anvil/test_suite/main.cfg | 149 +++++++++++++++++ configs/anvil/test_suite/main_vs_ctrl.cfg | 149 +++++++++++++++++ configs/anvil/test_suite/no_ncclimo.cfg | 153 ++++++++++++++++++ configs/anvil/test_suite/test_suite.bash | 126 +++++++++++++++ 8 files changed, 921 insertions(+) create mode 100644 configs/anvil/test_suite/QU480.cfg create mode 100644 configs/anvil/test_suite/ctrl.cfg create mode 100644 configs/anvil/test_suite/job_script.bash create mode 100644 configs/anvil/test_suite/job_script_no_polar_regions.bash create mode 100644 configs/anvil/test_suite/main.cfg create mode 100644 configs/anvil/test_suite/main_vs_ctrl.cfg create mode 100644 configs/anvil/test_suite/no_ncclimo.cfg create mode 100755 configs/anvil/test_suite/test_suite.bash diff --git a/configs/anvil/test_suite/QU480.cfg b/configs/anvil/test_suite/QU480.cfg new file mode 100644 index 000000000..b35d87eea --- /dev/null +++ b/configs/anvil/test_suite/QU480.cfg @@ -0,0 +1,149 @@ +[runs] +## options 
related to the run to be analyzed and control runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = QU480 + +# config file for a control run to which this run will be compared. The +# analysis should have already been run to completion once with this config +# file, so that the relevant MPAS climatologies already exist and have been +# remapped to the comparison grid. Leave this option commented out if no +# control run is desired. +# controlRunConfigFile = /path/to/config/file + +# config file for a main run on which the analysis was already run to +# completion. The relevant MPAS climatologies already exist and have been +# remapped to the comparison grid and time series have been extracted. +# Leave this option commented out if the analysis for the main run should be +# performed. +# mainRunConfigFile = /path/to/config/file + +[execute] +## options related to executing parallel tasks + +# the number of parallel tasks (1 means tasks run in serial, the default) +parallelTaskCount = 12 + +# the parallelism mode in ncclimo ("serial" or "bck") +# Set this to "bck" (background parallelism) if running on a machine that can +# handle 12 simultaneous processes, one for each monthly climatology. +ncclimoParallelMode = bck + +[diagnostics] +## config options related to observations, mapping files and region files used +## by MPAS-Analysis in diagnostics computations. + +# The base path to the diagnostics directory. Typically, this will be a shared +# directory on each E3SM supported machine (see the example config files for +# its location). For other machines, this would be the directory pointed to +# when running "download_analysis_data.py" to get the public observations, +# mapping files and region files. 
+baseDirectory = /lcrc/group/acme/diagnostics + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /lcrc/group/acme/ac.xylar/acme_scratch/anvil/20200305.A_WCYCL1850.ne4_oQU480.anvil + +# names of ocean and sea ice meshes (e.g. oEC60to30v3, oQU240v3, oRRS30to10v3, etc.) +mpasMeshName = oQU480 + +# subdirectory containing restart files +runSubdirectory = run +# subdirectory for ocean history files +oceanHistorySubdirectory = archive/ocn/hist +# subdirectory for sea ice history files +seaIceHistorySubdirectory = archive/ice/hist + +# names of namelist and streams files, either a path relative to baseDirectory +# or an absolute path. +oceanNamelistFileName = run/mpaso_in +oceanStreamsFileName = run/streams.ocean +seaIceNamelistFileName = run/mpassi_in +seaIceStreamsFileName = run/streams.seaice + +[output] +## options related to writing out plots, intermediate cached data sets, logs, +## etc. + +# directory where analysis should be written +baseDirectory = /lcrc/group/acme/ac.xylar/analysis_testing/baseline + +# Anvil doesn't have direct access to a web portal, so output will need +# to be copied elsewhere (e.g. NERSC web portal) +htmlSubdirectory = /lcrc/group/acme/public_html/diagnostic_output/ac.xylar/analysis_testing/baseline + +# a list of analyses to generate. Valid names can be seen by running: +# mpas_analysis --list +# This command also lists tags for each analysis. +# Shortcuts exist to generate (or not generate) several types of analysis. 
+# tasks from the given component or with
Use endYear = end to indicate +# that the full range of the data should be used. If errorOnMissing = False, +# the start and end year will be clipped to the valid range. Otherwise, out +# of bounds values will lead to an error. In a "control" config file used in +# a "main vs. control" analysis run, the range of years must be valid and +# cannot include "end" because the original data may not be available. +startYear = 1 +endYear = end + +[streamfunctionMOC] +## options related to plotting the streamfunction of the meridional overturning +## circulation (MOC) + +# Use postprocessing script to compute the MOC? You want this to be True +# for low-resolution simulations that use GM to parameterize eddies, because +# the online MOC analysis member currently does not include the bolus velocity +# in its calculation, whereas the postprocessing script does. +# NOTE: this is a temporary option that will be removed once the online +# MOC takes into account the bolus velocity when GM is on. +usePostprocessingScript = True diff --git a/configs/anvil/test_suite/ctrl.cfg b/configs/anvil/test_suite/ctrl.cfg new file mode 100644 index 000000000..feee053d6 --- /dev/null +++ b/configs/anvil/test_suite/ctrl.cfg @@ -0,0 +1,149 @@ +[runs] +## options related to the run to be analyzed and control runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = ctrl + +# config file for a control run to which this run will be compared. The +# analysis should have already been run to completion once with this config +# file, so that the relevant MPAS climatologies already exist and have been +# remapped to the comparison grid. Leave this option commented out if no +# control run is desired. +# controlRunConfigFile = /path/to/config/file + +# config file for a main run on which the analysis was already run to +# completion. 
The relevant MPAS climatologies already exist and have been +# remapped to the comparison grid and time series have been extracted. +# Leave this option commented out if the analysis for the main run should be +# performed. +# mainRunConfigFile = /path/to/config/file + +[execute] +## options related to executing parallel tasks + +# the number of parallel tasks (1 means tasks run in serial, the default) +parallelTaskCount = 12 + +# the parallelism mode in ncclimo ("serial" or "bck") +# Set this to "bck" (background parallelism) if running on a machine that can +# handle 12 simultaneous processes, one for each monthly climatology. +ncclimoParallelMode = bck + +[diagnostics] +## config options related to observations, mapping files and region files used +## by MPAS-Analysis in diagnostics computations. + +# The base path to the diagnostics directory. Typically, this will be a shared +# directory on each E3SM supported machine (see the example config files for +# its location). For other machines, this would be the directory pointed to +# when running "download_analysis_data.py" to get the public observations, +# mapping files and region files. +baseDirectory = /lcrc/group/acme/diagnostics + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /lcrc/group/acme/ac.xylar/acme_scratch/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil + +# names of ocean and sea ice meshes (e.g. oEC60to30v3, oQU240v3, oRRS30to10v3, etc.) +mpasMeshName = oQU240wLI + +# subdirectory containing restart files +runSubdirectory = run +# subdirectory for ocean history files +oceanHistorySubdirectory = archive/ocn/hist +# subdirectory for sea ice history files +seaIceHistorySubdirectory = archive/ice/hist + +# names of namelist and streams files, either a path relative to baseDirectory +# or an absolute path. 
+# tasks from the given component or with
+# the first year over which to average climatologies
+startYear = 4
+# the last year over which to average climatologies
+# NOTE: this is a temporary option that will be removed once the online +# MOC takes into account the bolus velocity when GM is on. +usePostprocessingScript = True diff --git a/configs/anvil/test_suite/job_script.bash b/configs/anvil/test_suite/job_script.bash new file mode 100644 index 000000000..5c10916ee --- /dev/null +++ b/configs/anvil/test_suite/job_script.bash @@ -0,0 +1,25 @@ +#!/bin/bash -l +#SBATCH --nodes=1 +#SBATCH --time=2:00:00 +#SBATCH -A condo +#SBATCH -p acme-small +#SBATCH --job-name=mpas_analysis +#SBATCH --output=mpas_analysis.o%j +#SBATCH --error=mpas_analysis.e%j + +cd $SLURM_SUBMIT_DIR +export OMP_NUM_THREADS=1 + +source /home/ac.xylar/miniconda3/etc/profile.d/conda.sh +conda activate test_env +export HDF5_USE_FILE_LOCKING=FALSE + +echo env: test_env +echo configs: ../configs/polarRegions.conf main.cfg + +mpas_analysis --list +mpas_analysis --plot_colormaps +mpas_analysis --setup_only ../configs/polarRegions.conf main.cfg +mpas_analysis --purge ../configs/polarRegions.conf main.cfg +mpas_analysis --html_only ../configs/polarRegions.conf main.cfg + diff --git a/configs/anvil/test_suite/job_script_no_polar_regions.bash b/configs/anvil/test_suite/job_script_no_polar_regions.bash new file mode 100644 index 000000000..70e4394b8 --- /dev/null +++ b/configs/anvil/test_suite/job_script_no_polar_regions.bash @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH --nodes=1 +#SBATCH --time=2:00:00 +#SBATCH -A condo +#SBATCH -p acme-small +#SBATCH --job-name=mpas_analysis +#SBATCH --output=mpas_analysis.o%j +#SBATCH --error=mpas_analysis.e%j + +cd $SLURM_SUBMIT_DIR +export OMP_NUM_THREADS=1 + +source /home/ac.xylar/miniconda3/etc/profile.d/conda.sh +conda activate test_env +export HDF5_USE_FILE_LOCKING=FALSE + +echo env: test_env +echo configs: no_polar_regions.cfg + +srun -N 1 -n 1 python -m mpas_analysis no_polar_regions.cfg + diff --git a/configs/anvil/test_suite/main.cfg b/configs/anvil/test_suite/main.cfg new file mode 100644 index 000000000..6a058d9ad --- 
/dev/null +++ b/configs/anvil/test_suite/main.cfg @@ -0,0 +1,149 @@ +[runs] +## options related to the run to be analyzed and control runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = main + +# config file for a control run to which this run will be compared. The +# analysis should have already been run to completion once with this config +# file, so that the relevant MPAS climatologies already exist and have been +# remapped to the comparison grid. Leave this option commented out if no +# control run is desired. +# controlRunConfigFile = /path/to/config/file + +# config file for a main run on which the analysis was already run to +# completion. The relevant MPAS climatologies already exist and have been +# remapped to the comparison grid and time series have been extracted. +# Leave this option commented out if the analysis for the main run should be +# performed. +# mainRunConfigFile = /path/to/config/file + +[execute] +## options related to executing parallel tasks + +# the number of parallel tasks (1 means tasks run in serial, the default) +parallelTaskCount = 12 + +# the parallelism mode in ncclimo ("serial" or "bck") +# Set this to "bck" (background parallelism) if running on a machine that can +# handle 12 simultaneous processes, one for each monthly climatology. +ncclimoParallelMode = bck + +[diagnostics] +## config options related to observations, mapping files and region files used +## by MPAS-Analysis in diagnostics computations. + +# The base path to the diagnostics directory. Typically, this will be a shared +# directory on each E3SM supported machine (see the example config files for +# its location). For other machines, this would be the directory pointed to +# when running "download_analysis_data.py" to get the public observations, +# mapping files and region files. 
+baseDirectory = /lcrc/group/acme/diagnostics + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /lcrc/group/acme/ac.xylar/acme_scratch/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil + +# names of ocean and sea ice meshes (e.g. oEC60to30v3, oQU240v3, oRRS30to10v3, etc.) +mpasMeshName = oQU240wLI + +# subdirectory containing restart files +runSubdirectory = run +# subdirectory for ocean history files +oceanHistorySubdirectory = archive/ocn/hist +# subdirectory for sea ice history files +seaIceHistorySubdirectory = archive/ice/hist + +# names of namelist and streams files, either a path relative to baseDirectory +# or an absolute path. +oceanNamelistFileName = run/mpaso_in +oceanStreamsFileName = run/streams.ocean +seaIceNamelistFileName = run/mpassi_in +seaIceStreamsFileName = run/streams.seaice + +[output] +## options related to writing out plots, intermediate cached data sets, logs, +## etc. + +# directory where analysis should be written +baseDirectory = /lcrc/group/acme/ac.xylar/analysis_testing/baseline + +# Anvil doesn't have direct access to a web portal, so output will need +# to be copied elsewhere (e.g. NERSC web portal) +htmlSubdirectory = /lcrc/group/acme/public_html/diagnostic_output/ac.xylar/analysis_testing/baseline + +# a list of analyses to generate. Valid names can be seen by running: +# mpas_analysis --list +# This command also lists tags for each analysis. +# Shortcuts exist to generate (or not generate) several types of analysis. 
+# tasks from the given component or with
Use endYear = end to indicate +# that the full range of the data should be used. If errorOnMissing = False, +# the start and end year will be clipped to the valid range. Otherwise, out +# of bounds values will lead to an error. In a "control" config file used in +# a "main vs. control" analysis run, the range of years must be valid and +# cannot include "end" because the original data may not be available. +startYear = 1 +endYear = end + +[streamfunctionMOC] +## options related to plotting the streamfunction of the meridional overturning +## circulation (MOC) + +# Use postprocessing script to compute the MOC? You want this to be True +# for low-resolution simulations that use GM to parameterize eddies, because +# the online MOC analysis member currently does not include the bolus velocity +# in its calculation, whereas the postprocessing script does. +# NOTE: this is a temporary option that will be removed once the online +# MOC takes into account the bolus velocity when GM is on. +usePostprocessingScript = True diff --git a/configs/anvil/test_suite/main_vs_ctrl.cfg b/configs/anvil/test_suite/main_vs_ctrl.cfg new file mode 100644 index 000000000..8325e1361 --- /dev/null +++ b/configs/anvil/test_suite/main_vs_ctrl.cfg @@ -0,0 +1,149 @@ +[runs] +## options related to the run to be analyzed and control runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = main + +# config file for a control run to which this run will be compared. The +# analysis should have already been run to completion once with this config +# file, so that the relevant MPAS climatologies already exist and have been +# remapped to the comparison grid. Leave this option commented out if no +# control run is desired. +controlRunConfigFile = ctrl.cfg + +# config file for a main run on which the analysis was already run to +# completion. 
The relevant MPAS climatologies already exist and have been +# remapped to the comparison grid and time series have been extracted. +# Leave this option commented out if the analysis for the main run should be +# performed. +mainRunConfigFile = main_py3.8.cfg + +[execute] +## options related to executing parallel tasks + +# the number of parallel tasks (1 means tasks run in serial, the default) +parallelTaskCount = 12 + +# the parallelism mode in ncclimo ("serial" or "bck") +# Set this to "bck" (background parallelism) if running on a machine that can +# handle 12 simultaneous processes, one for each monthly climatology. +ncclimoParallelMode = bck + +[diagnostics] +## config options related to observations, mapping files and region files used +## by MPAS-Analysis in diagnostics computations. + +# The base path to the diagnostics directory. Typically, this will be a shared +# directory on each E3SM supported machine (see the example config files for +# its location). For other machines, this would be the directory pointed to +# when running "download_analysis_data.py" to get the public observations, +# mapping files and region files. +baseDirectory = /lcrc/group/acme/diagnostics + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /lcrc/group/acme/ac.xylar/acme_scratch/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil + +# names of ocean and sea ice meshes (e.g. oEC60to30v3, oQU240v3, oRRS30to10v3, etc.) +mpasMeshName = oQU240wLI + +# subdirectory containing restart files +runSubdirectory = run +# subdirectory for ocean history files +oceanHistorySubdirectory = archive/ocn/hist +# subdirectory for sea ice history files +seaIceHistorySubdirectory = archive/ice/hist + +# names of namelist and streams files, either a path relative to baseDirectory +# or an absolute path. 
+# tasks from the given component or with
+# the first year over which to average climatologies
+startYear = 4
+# the last year over which to average climatologies
+# NOTE: this is a temporary option that will be removed once the online +# MOC takes into account the bolus velocity when GM is on. +usePostprocessingScript = True diff --git a/configs/anvil/test_suite/no_ncclimo.cfg b/configs/anvil/test_suite/no_ncclimo.cfg new file mode 100644 index 000000000..8e6e37031 --- /dev/null +++ b/configs/anvil/test_suite/no_ncclimo.cfg @@ -0,0 +1,153 @@ +[runs] +## options related to the run to be analyzed and control runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = main + +# config file for a control run to which this run will be compared. The +# analysis should have already been run to completion once with this config +# file, so that the relevant MPAS climatologies already exist and have been +# remapped to the comparison grid. Leave this option commented out if no +# control run is desired. +# controlRunConfigFile = /path/to/config/file + +# config file for a main run on which the analysis was already run to +# completion. The relevant MPAS climatologies already exist and have been +# remapped to the comparison grid and time series have been extracted. +# Leave this option commented out if the analysis for the main run should be +# performed. +# mainRunConfigFile = /path/to/config/file + +[execute] +## options related to executing parallel tasks + +# the number of parallel tasks (1 means tasks run in serial, the default) +parallelTaskCount = 12 + +# the parallelism mode in ncclimo ("serial" or "bck") +# Set this to "bck" (background parallelism) if running on a machine that can +# handle 12 simultaneous processes, one for each monthly climatology. +ncclimoParallelMode = bck + +[diagnostics] +## config options related to observations, mapping files and region files used +## by MPAS-Analysis in diagnostics computations. + +# The base path to the diagnostics directory. 
Typically, this will be a shared +# directory on each E3SM supported machine (see the example config files for +# its location). For other machines, this would be the directory pointed to +# when running "download_analysis_data.py" to get the public observations, +# mapping files and region files. +baseDirectory = /lcrc/group/acme/diagnostics + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /lcrc/group/acme/ac.xylar/acme_scratch/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil + +# names of ocean and sea ice meshes (e.g. oEC60to30v3, oQU240v3, oRRS30to10v3, etc.) +mpasMeshName = oQU240wLI + +# subdirectory containing restart files +runSubdirectory = run +# subdirectory for ocean history files +oceanHistorySubdirectory = archive/ocn/hist +# subdirectory for sea ice history files +seaIceHistorySubdirectory = archive/ice/hist + +# names of namelist and streams files, either a path relative to baseDirectory +# or an absolute path. +oceanNamelistFileName = run/mpaso_in +oceanStreamsFileName = run/streams.ocean +seaIceNamelistFileName = run/mpassi_in +seaIceStreamsFileName = run/streams.seaice + +[output] +## options related to writing out plots, intermediate cached data sets, logs, +## etc. + +# directory where analysis should be written +baseDirectory = /lcrc/group/acme/ac.xylar/analysis_testing/baseline + +# Anvil doesn't have direct access to a web portal, so output will need +# to be copied elsewhere (e.g. NERSC web portal) +htmlSubdirectory = /lcrc/group/acme/public_html/diagnostic_output/ac.xylar/analysis_testing/baseline + +# a list of analyses to generate. Valid names can be seen by running: +# mpas_analysis --list +# This command also lists tags for each analysis. +# Shortcuts exist to generate (or not generate) several types of analysis. 
+# tasks from the given component or with
Use endYear = end to indicate +# that the full range of the data should be used. If errorOnMissing = False, +# the start and end year will be clipped to the valid range. Otherwise, out +# of bounds values will lead to an error. In a "control" config file used in +# a "main vs. control" analysis run, the range of years must be valid and +# cannot include "end" because the original data may not be available. +startYear = 1 +endYear = end + +[streamfunctionMOC] +## options related to plotting the streamfunction of the meridional overturning +## circulation (MOC) + +# Use postprocessing script to compute the MOC? You want this to be True +# for low-resolution simulations that use GM to parameterize eddies, because +# the online MOC analysis member currently does not include the bolus velocity +# in its calculation, whereas the postprocessing script does. +# NOTE: this is a temporary option that will be removed once the online +# MOC takes into account the bolus velocity when GM is on. +usePostprocessingScript = True diff --git a/configs/anvil/test_suite/test_suite.bash b/configs/anvil/test_suite/test_suite.bash new file mode 100755 index 000000000..434ee727c --- /dev/null +++ b/configs/anvil/test_suite/test_suite.bash @@ -0,0 +1,126 @@ +#!/usr/bin/env bash + +set -e + +branch=$(git symbolic-ref --short HEAD) + +export HDF5_USE_FILE_LOCKING=FALSE + +source ${HOME}/miniconda3/etc/profile.d/conda.sh + +conda activate base +conda update -y conda conda-build +rm -rf ${HOME}/miniconda3/conda-bld + +# create the test conda envs +for py in 3.7 3.8 +do + env=test_mpas_analysis_py${py} + conda build -m ci/python${py}.yaml ci/recipe + conda remove -y --all -n ${env} + conda create -y -n ${env} --use-local python=${py} mpas-analysis sphinx \ + mock sphinx_rtd_theme "tabulate>=0.8.2" m2r pytest + conda activate ${env} + pytest + conda deactivate +done + +# create another env for testing xarray master branch +env=test_mpas_analysis_xarray_master +conda create --yes --quiet --name 
${env} --use-local python=${py} \ + mpas-analysis pytest +conda activate ${env} +pip install git+https://github.com/pydata/xarray.git +pytest +conda deactivate + +# test building the docs +py=3.8 +conda activate test_mpas_analysis_py${py} +cd docs +make clean +make html +cd .. +conda deactivate + +# move to a subdirectory so we use the conda package, not the local package +rm -rf anvil_test_suite +mkdir anvil_test_suite + +cd anvil_test_suite + +template_path=../configs/anvil/test_suite + +for py in 3.7 3.8 +do + env=test_mpas_analysis_py${py} + run=main_py${py} + config=${run}.cfg + job=job_script_${run}.bash + sed "s/baseline/${branch}\/py${py}/g" ${template_path}/main.cfg > ${config} + sed -e "s/main.cfg/${config}/g" -e "s/test_env/${env}/g" \ + ${template_path}/job_script.bash > ${job} +done + + +py=3.8 +env=test_mpas_analysis_py${py} + +run=no_ncclimo +config=${run}.cfg +job=job_script_${run}.bash +sed "s/baseline/${branch}\/${run}/g" ${template_path}/${config} > ${config} +sed -e "s/main.cfg/${config}/g" -e "s/test_env/${env}/g" \ + ${template_path}/job_script.bash > ${job} + +run=ctrl +config=${run}.cfg +job=job_script_${run}.bash +sed "s/baseline/${branch}\/py${py}/g" ${template_path}/${config} > ${config} + +run=main_vs_ctrl +config=${run}.cfg +job=job_script_${run}.bash +sed "s/baseline/${branch}\/${run}/g" ${template_path}/${config} > ${config} +sed -e "s/main.cfg/${config}/g" -e "s/test_env/${env}/g" \ + ${template_path}/job_script.bash > ${job} + +run=no_polar_regions +config=${run}.cfg +job=job_script_${run}.bash +sed "s/baseline/${branch}\/${run}/g" ${template_path}/main.cfg > ${config} +sed -e "s/test_env/${env}/g" ${template_path}/${job} > ${job} + +run=QU480 +config=${run}.cfg +job=job_script_${run}.bash +sed "s/baseline/${branch}\/${run}/g" ${template_path}/${config} > ${config} +sed -e "s/main.cfg/${config}/g" -e "s/test_env/${env}/g" \ + ${template_path}/job_script.bash > ${job} + +env=test_mpas_analysis_xarray_master +run=xarray_master 
+config=${run}.cfg +job=job_script_${run}.bash +sed "s/baseline/${branch}\/${run}/g" ${template_path}/main.cfg > ${config} +sed -e "s/main.cfg/${config}/g" -e "s/test_env/${env}/g" \ + ${template_path}/job_script.bash > ${job} + + +# submit the jobs +sbatch job_script_main_py3.7.bash + +RES=$(sbatch job_script_main_py3.8.bash) + +sbatch --dependency=afterok:${RES##* } job_script_main_vs_ctrl.bash + +sbatch job_script_no_ncclimo.bash + +sbatch job_script_no_polar_regions.bash + +sbatch job_script_QU480.bash + +sbatch job_script_xarray_master.bash + +cd .. + From 0eb9e80eaf20c9e0320b4d04aee4814772b180e3 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 10 Nov 2020 13:14:57 +0100 Subject: [PATCH 20/58] Fix local package installation directory in Azure CI was using the conda-forge package, not the local one. --- azure-pipelines.yml | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 5ca1e33e4..9bd5527e0 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -46,8 +46,9 @@ jobs: - bash: | eval "$(conda shell.bash hook)" - conda create --yes --quiet --name test --use-local python=$PYTHON_VERSION \ - mpas-analysis pytest + conda activate build + conda create --yes --quiet --name test -c ${CONDA_PREFIX}/conda-bld/ \ + python=$PYTHON_VERSION mpas-analysis pytest displayName: Create Anaconda test environment - bash: | @@ -58,8 +59,10 @@ jobs: - bash: | eval "$(conda shell.bash hook)" - conda create --yes --quiet --name docs --use-local python=$PYTHON_VERSION \ - mpas-analysis sphinx mock sphinx_rtd_theme tabulate m2r + conda activate build + conda create --yes --quiet --name docs -c ${CONDA_PREFIX}/conda-bld/ \ + python=$PYTHON_VERSION mpas-analysis sphinx mock sphinx_rtd_theme \ + tabulate m2r condition: eq(variables['python.version'], '3.8') displayName: Create Anaconda docs environment @@ -155,8 +158,9 @@ jobs: - bash: | eval "$(conda shell.bash hook)" - conda create --yes 
--quiet --name test --use-local python=$PYTHON_VERSION \ - mpas-analysis pytest + conda activate build + conda create --yes --quiet --name test -c ${CONDA_PREFIX}/conda-bld/ \ + python=$PYTHON_VERSION mpas-analysis pytest conda activate test pip install git+https://github.com/pydata/xarray.git @@ -199,8 +203,9 @@ jobs: - bash: | eval "$(conda shell.bash hook)" - conda create --yes --quiet --name test --use-local python=$PYTHON_VERSION \ - mpas-analysis pytest + conda activate build + conda create --yes --quiet --name test -c ${CONDA_PREFIX}/conda-bld/ \ + python=$PYTHON_VERSION mpas-analysis pytest displayName: Create Anaconda test environment - bash: | From f778d4f2b89a843787e3eac10fb1694bf8d88e69 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 19 Nov 2020 11:29:35 +0100 Subject: [PATCH 21/58] Switch from matplotlib-base to full matplotlib The base isn't enough for what we want to do. --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 2d3484399..3fb6bcdc7 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ environment with the following packages: * python >= 3.6 * numpy * scipy - * matplotlib-base >= 3.0.2 + * matplotlib >= 3.0.2 * netCDF4 * xarray >= 0.14.1 * dask @@ -71,7 +71,7 @@ These can be installed via the conda command: ``` conda config --add channels conda-forge conda config --set channel_priority strict -conda create -n mpas-analysis python=3.8 numpy scipy "matplotlib-base>=3.0.2" \ +conda create -n mpas-analysis python=3.8 numpy scipy "matplotlib>=3.0.2" \ netCDF4 "xarray>=0.14.1" dask bottleneck lxml "nco>=4.8.1" pyproj \ pillow cmocean progressbar2 requests setuptools shapely "cartopy>=0.18.0" \ cartopy_offlinedata "geometric_features>=0.1.9" gsw "pyremap<0.1.0" \ From 0def669ba1d7d8a3210bbbced772539c7ac21661 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 19 Nov 2020 12:07:34 +0100 Subject: [PATCH 22/58] Fix azure badge in README The markdown wasn't quite right. 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3fb6bcdc7..a6065f6a7 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # MPAS-Analysis -[![Build Status](https://dev.azure.com/MPAS-Dev/MPAS-Analysis%20testing/_apis/build/status/MPAS-Dev.MPAS-Analysis?branchName=refs%2Fpull%2F728%2Fmerge)](https://dev.azure.com/MPAS-Dev/MPAS-Analysis%20testing/_build/latest?definitionId=2&branchName=refs%2Fpull%2F728%2Fmerge) +[![Build Status](https://dev.azure.com/MPAS-Dev/MPAS-Analysis%20testing/_apis/build/status/MPAS-Dev.MPAS-Analysis?branchName=develop)](https://dev.azure.com/MPAS-Dev/MPAS-Analysis%20testing/_build/latest?definitionId=2&branchName=develop) Analysis for simulations produced with Model for Prediction Across Scales (MPAS) components and the Energy Exascale Earth System Model (E3SM), which From 0f85d40ea8ad285f424595f25047f40bafce9967 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 21 Nov 2020 10:38:08 +0100 Subject: [PATCH 23/58] Break Antarctic melt time series into subtasks per year Hopefully, this will improve performance. 
--- .../ocean/time_series_antarctic_melt.py | 152 +++++++++++++++--- 1 file changed, 128 insertions(+), 24 deletions(-) diff --git a/mpas_analysis/ocean/time_series_antarctic_melt.py b/mpas_analysis/ocean/time_series_antarctic_melt.py index 596d75bd1..c2f64fa65 100644 --- a/mpas_analysis/ocean/time_series_antarctic_melt.py +++ b/mpas_analysis/ocean/time_series_antarctic_melt.py @@ -87,14 +87,40 @@ def __init__(self, config, mpasTimeSeriesTask, regionMasksTask, masksSubtask = regionMasksTask.add_mask_subtask( self.iceShelfMasksFile, outFileSuffix='iceShelves20200621') - computeMeltSubtask = ComputeMeltSubtask(self, mpasTimeSeriesTask, - masksSubtask, iceShelvesToPlot) - self.add_subtask(computeMeltSubtask) + startYear = config.getint('timeSeries', 'startYear') + endYear = config.get('timeSeries', 'endYear') + if endYear == 'end': + # a valid end year wasn't found, so likely the run was not found, + # perhaps because we're just listing analysis tasks + endYear = startYear + else: + endYear = int(endYear) + + years = list(range(startYear, endYear + 1)) + + # in the end, we'll combine all the time series into one, but we + # create this task first so it's easier to tell it to run after all + # the compute tasks + combineSubtask = CombineMeltSubtask( + self, startYears=years, endYears=years) + + # run one subtask per year + for year in years: + computeSubtask = ComputeMeltSubtask( + self, startYear=year, endYear=year, + mpasTimeSeriesTask=mpasTimeSeriesTask, + masksSubtask=masksSubtask, + iceShelvesToPlot=iceShelvesToPlot) + self.add_subtask(computeSubtask) + computeSubtask.run_after(masksSubtask) + combineSubtask.run_after(computeSubtask) + + self.add_subtask(combineSubtask) for index, iceShelf in enumerate(iceShelvesToPlot): plotMeltSubtask = PlotMeltSubtask(self, iceShelf, index, controlConfig) - plotMeltSubtask.run_after(computeMeltSubtask) + plotMeltSubtask.run_after(combineSubtask) self.add_subtask(plotMeltSubtask) # }}} @@ -121,8 +147,8 @@ class 
ComputeMeltSubtask(AnalysisTask): # {{{ # ------- # Xylar Asay-Davis, Stephen Price - def __init__(self, parentTask, mpasTimeSeriesTask, masksSubtask, - iceShelvesToPlot): # {{{ + def __init__(self, parentTask, startYear, endYear, mpasTimeSeriesTask, + masksSubtask, iceShelvesToPlot): # {{{ """ Construct the analysis task. @@ -151,7 +177,8 @@ def __init__(self, parentTask, mpasTimeSeriesTask, masksSubtask, taskName=parentTask.taskName, componentName=parentTask.componentName, tags=parentTask.tags, - subtaskName='computeMeltRates') + subtaskName='computeMeltRates_{:04d}-{:04d}'.format(startYear, + endYear)) self.mpasTimeSeriesTask = mpasTimeSeriesTask self.run_after(mpasTimeSeriesTask) @@ -160,6 +187,13 @@ def __init__(self, parentTask, mpasTimeSeriesTask, masksSubtask, self.run_after(masksSubtask) self.iceShelvesToPlot = iceShelvesToPlot + self.restartFileName = None + self.startYear = startYear + self.endYear = endYear + self.startDate = '{:04d}-01-01_00:00:00'.format(self.startYear) + self.endDate = '{:04d}-12-31_23:59:59'.format(self.endYear) + self.variableList = \ + ['timeMonthly_avg_landIceFreshwaterFlux'] # }}} def setup_and_check(self): # {{{ @@ -189,8 +223,6 @@ def setup_and_check(self): # {{{ analysisOptionName='config_am_timeseriesstatsmonthly_enable', raiseException=True) - config = self.config - landIceFluxMode = self.namelist.get('config_land_ice_flux_mode') if landIceFluxMode not in ['standalone', 'coupled']: raise ValueError('*** timeSeriesAntarcticMelt requires ' @@ -206,13 +238,6 @@ def setup_and_check(self): # {{{ raise IOError('No MPAS-O restart file found: need at least one ' 'restart file for Antarctic melt calculations') - # get a list of timeSeriesStats output files from the streams file, - # reading only those that are between the start and end dates - self.startDate = config.get('timeSeries', 'startDate') - self.endDate = config.get('timeSeries', 'endDate') - - self.variableList = \ - ['timeMonthly_avg_landIceFreshwaterFlux'] 
self.mpasTimeSeriesTask.add_variables(variableList=self.variableList) return # }}} @@ -230,10 +255,16 @@ def run_task(self): # {{{ mpasTimeSeriesTask = self.mpasTimeSeriesTask config = self.config - baseDirectory = build_config_full_path( - config, 'output', 'timeSeriesSubdirectory') + outputDirectory = '{}/iceShelfFluxes/'.format( + build_config_full_path(config, 'output', 'timeseriesSubdirectory')) + + try: + os.makedirs(outputDirectory) + except OSError: + pass - outFileName = '{}/iceShelfAggregatedFluxes.nc'.format(baseDirectory) + outFileName = '{}/iceShelfFluxes_{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYear, self.endYear) # Load data: inputFile = mpasTimeSeriesTask.outputFile @@ -253,7 +284,7 @@ def run_task(self): # {{{ 'it.'.format(outFileName)) os.remove(outFileName) except OSError: - # something is potentailly wrong with the file, so let's delete + # something is potentially wrong with the file, so let's delete # it and try again self.logger.warning('Problems reading file {}. Deleting ' 'it.'.format(outFileName)) @@ -331,7 +362,76 @@ def run_task(self): # {{{ write_netcdf(dsOut, outFileName) # }}} + # }}} + + +class CombineMeltSubtask(AnalysisTask): # {{{ + """ + Combine individual time series into a single data set + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, startYears, endYears): # {{{ + """ + Construct the analysis task. 
+ + Parameters + ---------- + parentTask : ``TimeSeriesOceanRegions`` + The main task of which this is a subtask + + startYears, endYears : list + The beginning and end of each time series to combine + """ + # Authors + # ------- + # Xylar Asay-Davis + + subtaskName = 'combineAntarcticMeltTimeSeries' + + # first, call the constructor from the base class (AnalysisTask) + super(CombineMeltSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName=subtaskName) + + self.startYears = startYears + self.endYears = endYears + # }}} + def run_task(self): # {{{ + """ + Combine the time series + """ + # Authors + # ------- + # Xylar Asay-Davis + + outputDirectory = '{}/iceShelfFluxes/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory')) + + outFileName = '{}/iceShelfFluxes_{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYears[0], self.endYears[-1]) + + if not os.path.exists(outFileName): + inFileNames = [] + for startYear, endYear in zip(self.startYears, self.endYears): + inFileName = '{}/iceShelfFluxes_{:04d}-{:04d}.nc'.format( + outputDirectory, startYear, endYear) + inFileNames.append(inFileName) + + ds = xarray.open_mfdataset(inFileNames, combine='nested', + concat_dim='Time', decode_times=False) + + ds.load() + + write_netcdf(ds, outFileName) + # }}} # }}} @@ -457,7 +557,7 @@ def run_task(self): # {{{ self._load_ice_shelf_fluxes(self.controlConfig) # Load observations from multiple files and put in dictionary based - # on shelf keyname + # on shelf key name observationsDirectory = build_obs_path(config, 'ocean', 'meltSubdirectory') obsFileNameDict = {'Rignot et al. 
(2013)': @@ -658,10 +758,14 @@ def _load_ice_shelf_fluxes(self, config): # {{{ # ------- # Xylar Asay-Davis - baseDirectory = build_config_full_path( - config, 'output', 'timeSeriesSubdirectory') + outputDirectory = '{}/iceShelfFluxes/'.format( + build_config_full_path(config, 'output', 'timeseriesSubdirectory')) + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.getint('timeSeries', 'endYear') - outFileName = '{}/iceShelfAggregatedFluxes.nc'.format(baseDirectory) + outFileName = '{}/iceShelfFluxes_{:04d}-{:04d}.nc'.format( + outputDirectory, startYear, endYear) dsOut = xarray.open_dataset(outFileName) return dsOut.totalMeltFlux, dsOut.meltRates From b8ed04c28a2505de3c87231ef7c06199436b8eaa Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 21 Nov 2020 02:57:42 -0800 Subject: [PATCH 24/58] print ice shelf names in log --- mpas_analysis/ocean/time_series_antarctic_melt.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/mpas_analysis/ocean/time_series_antarctic_melt.py b/mpas_analysis/ocean/time_series_antarctic_melt.py index c2f64fa65..d44a50a58 100644 --- a/mpas_analysis/ocean/time_series_antarctic_melt.py +++ b/mpas_analysis/ocean/time_series_antarctic_melt.py @@ -314,6 +314,8 @@ def run_task(self): # {{{ # select only those regions we want to plot dsRegionMask = dsRegionMask.isel(nRegions=regionIndices) + regionNames = decode_strings(dsRegionMask.regionNames) + datasets = [] nTime = dsIn.sizes['Time'] for tIndex in range(nTime): @@ -327,6 +329,7 @@ def run_task(self): # {{{ totalMeltFluxes = numpy.zeros((nRegions,)) for regionIndex in range(nRegions): + self.logger.info(' {}'.format(regionNames[regionIndex])) cellMask = \ dsRegionMask.regionCellMasks.isel(nRegions=regionIndex) @@ -346,8 +349,8 @@ def run_task(self): # {{{ dsOut = xarray.Dataset() dsOut.coords['Time'] = dsIn.Time.isel(Time=tIndex) - dsOut['totalMeltFlux'] = (('nRegions'), totalMeltFluxes) - dsOut['meltRates'] = (('nRegions'), meltRates) + 
dsOut['totalMeltFlux'] = (('nRegions',), totalMeltFluxes) + dsOut['meltRates'] = (('nRegions',), meltRates) datasets.append(dsOut) dsOut = xarray.concat(objs=datasets, dim='Time') From 72a63a9f03b9d5d23bbcad3ffe28eb7f079bcf14 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 13 Sep 2020 11:03:49 +0200 Subject: [PATCH 25/58] Add WOA18 and SOSE obs. to time series of ocean regions --- mpas_analysis/config.default | 3 + .../ocean/time_series_ocean_regions.py | 325 +++++++++++++++++- 2 files changed, 325 insertions(+), 3 deletions(-) diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 5b0b7a87b..78d89c5b4 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -1287,6 +1287,9 @@ variables = [{'name': 'temperature', # zmin = -1000 # zmax = -400 +# Obserational data sets to compare against +obs = ['SOSE', 'WOA18'] + [timeSeriesTransport] ## options related to plotting time series of transport through transects diff --git a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index 9a24ea082..c09dc57d5 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -26,7 +26,7 @@ from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf from mpas_analysis.shared.io.utility import build_config_full_path, \ - get_files_year_month, decode_strings, get_region_mask + build_obs_path, get_files_year_month, decode_strings, get_region_mask from mpas_analysis.shared.html import write_image_xml @@ -34,6 +34,8 @@ from mpas_analysis.ocean.utility import compute_zmid +from mpas_analysis.shared.constants import constants + class TimeSeriesOceanRegions(AnalysisTask): # {{{ """ @@ -82,6 +84,40 @@ def __init__(self, config, regionMasksTask, controlConfig=None): regionGroups = config.getExpression(self.taskName, 'regionGroups') + obsDicts = { + 'SOSE': { + 'suffix': 'SOSE', + 'gridName': 
'SouthernOcean_0.167x0.167degree', + 'gridFileName': 'SOSE/SOSE_2005-2010_monthly_pot_temp_' + 'SouthernOcean_0.167x0.167degree_20180710.nc', + 'TFileName': 'SOSE/SOSE_2005-2010_monthly_pot_temp_' + 'SouthernOcean_0.167x0.167degree_20180710.nc', + 'SFileName': 'SOSE/SOSE_2005-2010_monthly_salinity_' + 'SouthernOcean_0.167x0.167degree_20180710.nc', + 'volFileName': 'SOSE/SOSE_volume_' + 'SouthernOcean_0.167x0.167degree_20190815.nc', + 'lonVar': 'lon', + 'latVar': 'lat', + 'TVar': 'theta', + 'SVar': 'salinity', + 'volVar': 'volume', + 'zVar': 'z', + 'tDim': 'Time'}, + 'WOA18': { + 'suffix': 'WOA18', + 'gridName': 'Global_0.25x0.25degree', + 'gridFileName': 'WOA18/woa18_decav_04_TS_mon_20190829.nc', + 'TFileName': 'WOA18/woa18_decav_04_TS_mon_20190829.nc', + 'SFileName': 'WOA18/woa18_decav_04_TS_mon_20190829.nc', + 'volFileName': None, + 'lonVar': 'lon', + 'latVar': 'lat', + 'TVar': 't_an', + 'SVar': 's_an', + 'volVar': 'volume', + 'zVar': 'depth', + 'tDim': 'month'}} + for regionGroup in regionGroups: sectionSuffix = regionGroup[0].upper() + \ regionGroup[1:].replace(' ', '') @@ -103,6 +139,25 @@ def __init__(self, config, regionMasksTask, controlConfig=None): years = list(range(startYear, endYear + 1)) + obsList = config.getExpression(sectionName, 'obs') + groupObsDicts = {} + + for obsName in obsList: + localObsDict = dict(obsDicts[obsName]) + obsFileName = build_obs_path( + config, component=self.componentName, + relativePath=localObsDict['gridFileName']) + obsMasksSubtask = regionMasksTask.add_mask_subtask( + regionMaskFile, outFileSuffix=regionMaskSuffix, + obsFileName=obsFileName, lonVar=localObsDict['lonVar'], + latVar=localObsDict['latVar'], + meshName=localObsDict['gridName']) + + obsDicts[obsName]['maskTask'] = obsMasksSubtask + + localObsDict['maskTask'] = obsMasksSubtask + groupObsDicts[obsName] = localObsDict + # in the end, we'll combine all the time series into one, but we # create this task first so it's easier to tell it to run after all # the 
compute tasks @@ -133,9 +188,18 @@ def __init__(self, config, regionMasksTask, controlConfig=None): fullSuffix = sectionSuffix + '_' + regionName[0].lower() + \ regionName[1:].replace(' ', '') + obsSubtasks = {} + for obsName in obsList: + localObsDict = dict(groupObsDicts[obsName]) + + obsSubtask = ComputeObsRegionalTimeSeriesSubtask( + self, regionGroup, regionName, fullSuffix, + masksSubtask, localObsDict) + obsSubtasks[obsName] = obsSubtask + plotRegionSubtask = PlotRegionTimeSeriesSubtask( self, regionGroup, regionName, index, controlConfig, - sectionName, fullSuffix) + sectionName, fullSuffix, obsSubtasks) plotRegionSubtask.run_after(combineSubtask) self.add_subtask(plotRegionSubtask) @@ -671,6 +735,232 @@ def run_task(self): # {{{ # }}} +class ComputeObsRegionalTimeSeriesSubtask(AnalysisTask): + """ + Compute the regional mean of the obs climatology + + Attributes + ---------- + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, regionGroup, regionName, fullSuffix, + masksSubtask, obsDict): + # {{{ + """ + Construct the analysis task. 
+ + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + regionGroup : str + Name of the collection of region to plot + + regionName : str + Name of the region to plot + + fullSuffix : str + The regionGroup and regionName combined and modified to be + appropriate as a task or file suffix + + masksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask files for each region to plot, used + to get the mask file name + + obsDict : dict + Information on the observations to compare against + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeObsRegionalTimeSeriesSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='compute{}_{}'.format(fullSuffix, obsDict['suffix'])) + + self.regionGroup = regionGroup + self.regionName = regionName + self.masksSubtask = masksSubtask + self.obsDict = obsDict + self.prefix = fullSuffix[0].lower() + fullSuffix[1:] + + timeSeriesName = regionGroup[0].lower() + \ + regionGroup[1:].replace(' ', '') + outputDirectory = '{}/{}/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory'), + timeSeriesName) + + self.outFileName = '{}/TS_{}_{}.nc'.format( + outputDirectory, obsDict['suffix'], self.prefix) + + self.run_after(obsDict['maskTask']) + # }}} + + def run_task(self): # {{{ + """ + Compute time-series output of properties in an ocean region. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + self.logger.info("\nAveraging T and S for {}...".format( + self.regionName)) + + obsDict = self.obsDict + config = self.config + + regionGroup = self.regionGroup + timeSeriesName = regionGroup[0].lower() + \ + regionGroup[1:].replace(' ', '') + + sectionSuffix = regionGroup[0].upper() + \ + regionGroup[1:].replace(' ', '') + sectionName = 'timeSeries{}'.format(sectionSuffix) + + outputDirectory = '{}/{}/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory'), + timeSeriesName) + + outFileName = '{}/TS_{}_{}.nc'.format( + outputDirectory, obsDict['suffix'], self.prefix) + + if os.path.exists(outFileName): + return + + regionMaskFileName = obsDict['maskTask'].maskFileName + + dsRegionMask = \ + xarray.open_dataset(regionMaskFileName).stack( + nCells=(obsDict['latVar'], obsDict['lonVar'])) + dsRegionMask = dsRegionMask.reset_index('nCells').drop_vars( + [obsDict['latVar'], obsDict['lonVar']]) + + maskRegionNames = decode_strings(dsRegionMask.regionNames) + regionIndex = maskRegionNames.index(self.regionName) + + dsMask = dsRegionMask.isel(nRegions=regionIndex) + + cellMask = dsMask.regionCellMasks == 1 + + if config.has_option(sectionName, 'zmin'): + zmin = config.getfloat(sectionName, 'zmin') + else: + zmin = dsMask.zminRegions.values + + if config.has_option(sectionName, 'zmax'): + zmax = config.getfloat(sectionName, 'zmax') + else: + zmax = dsMask.zmaxRegions.values + + TVarName = obsDict['TVar'] + SVarName = obsDict['SVar'] + zVarName = obsDict['zVar'] + lonVarName = obsDict['lonVar'] + latVarName = obsDict['latVar'] + volVarName = obsDict['volVar'] + tDim = obsDict['tDim'] + + obsFileName = build_obs_path( + config, component=self.componentName, + relativePath=obsDict['TFileName']) + self.logger.info(' Reading from {}...'.format(obsFileName)) + + ds = xarray.open_dataset(obsFileName) + if obsDict['SFileName'] != obsDict['TFileName']: + obsFileName = build_obs_path( + config, 
component=self.componentName, + relativePath=obsDict['SFileName']) + self.logger.info(' Reading from {}...'.format(obsFileName)) + dsS = xarray.open_dataset(obsFileName) + ds[SVarName] = dsS[SVarName] + + if obsDict['volFileName'] is None: + # compute volume from lat, lon, depth bounds + self.logger.info(' Computing volume...'.format(obsFileName)) + latBndsName = ds[latVarName].attrs['bounds'] + lonBndsName = ds[lonVarName].attrs['bounds'] + zBndsName = ds[zVarName].attrs['bounds'] + latBnds = ds[latBndsName] + lonBnds = ds[lonBndsName] + zBnds = ds[zBndsName] + dLat = numpy.deg2rad(latBnds[:, 1] - latBnds[:, 0]) + dLon = numpy.deg2rad(lonBnds[:, 1] - lonBnds[:, 0]) + lat = numpy.deg2rad(ds[latVarName]) + dz = zBnds[:, 1] - zBnds[:, 0] + radius = 6378137.0 + area = radius**2*numpy.cos(lat)*dLat*dLon + volume = dz*area + ds[volVarName] = volume + + elif obsDict['volFileName'] != obsDict['TFileName']: + obsFileName = build_obs_path( + config, component=self.componentName, + relativePath=obsDict['volFileName']) + self.logger.info(' Reading from {}...'.format(obsFileName)) + dsVol = xarray.open_dataset(obsFileName) + ds[volVarName] = dsVol[volVarName] + + if 'positive' in ds[zVarName].attrs and \ + ds[zVarName].attrs['positive'] == 'down': + attrs = ds[zVarName].attrs + ds[zVarName] = -ds[zVarName] + ds[zVarName].attrs = attrs + ds[zVarName].attrs['positive'] = 'up' + + TMean = numpy.zeros(ds.sizes[tDim]) + SMean = numpy.zeros(ds.sizes[tDim]) + + depthMask = numpy.logical_and(ds[zVarName] >= zmin, + ds[zVarName] <= zmax) + + for tIndex in range(ds.sizes[tDim]): + dsMonth = ds.isel({tDim: tIndex}) + dsMonth = dsMonth.stack(nCells=(obsDict['latVar'], + obsDict['lonVar'])) + dsMonth = dsMonth.reset_index('nCells').drop_vars( + [obsDict['latVar'], obsDict['lonVar']]) + + dsMonth = dsMonth.where(cellMask, drop=True) + + dsMonth = dsMonth.where(depthMask) + + mask = dsMonth[TVarName].notnull() + TSum = (dsMonth[TVarName]*dsMonth[volVarName]).sum(dim=('nCells', + zVarName)) + 
volSum = (mask*dsMonth[volVarName]).sum(dim=('nCells', zVarName)) + TMean[tIndex] = TSum/volSum + + mask = dsMonth[SVarName].notnull() + SSum = (dsMonth[SVarName]*dsMonth[volVarName]).sum(dim=('nCells', + zVarName)) + volSum = (mask*dsMonth[volVarName]).sum(dim=('nCells', zVarName)) + SMean[tIndex] = SSum/volSum + + dsOut = xarray.Dataset() + dsOut['temperature'] = ('Time', TMean) + dsOut['salinity'] = ('Time', SMean) + dsOut['zbounds'] = ('nBounds', [zmin, zmax]) + dsOut['month'] = ('Time', numpy.array(ds.month.values, dtype=float)) + dsOut['year'] = ('Time', numpy.ones(ds.sizes[tDim])) + write_netcdf(dsOut, outFileName) + + # }}} + + # }}} + + class PlotRegionTimeSeriesSubtask(AnalysisTask): """ Plots time-series output of properties in an ocean region. @@ -698,7 +988,7 @@ class PlotRegionTimeSeriesSubtask(AnalysisTask): # Xylar Asay-Davis def __init__(self, parentTask, regionGroup, regionName, regionIndex, - controlConfig, sectionName, fullSuffix): + controlConfig, sectionName, fullSuffix, obsSubtasks): # {{{ """ Construct the analysis task. 
@@ -727,6 +1017,9 @@ def __init__(self, parentTask, regionGroup, regionName, regionIndex, fullSuffix : str The regionGroup and regionName combined and modified to be appropriate as a task or file suffix + + obsSubtasks : dict of ``AnalysisTasks`` + Subtasks for computing the mean observed T and S in the region """ # Authors # ------- @@ -746,6 +1039,10 @@ def __init__(self, parentTask, regionGroup, regionName, regionIndex, self.sectionName = sectionName self.controlConfig = controlConfig self.prefix = fullSuffix[0].lower() + fullSuffix[1:] + self.obsSubtasks = obsSubtasks + + for obsName in obsSubtasks: + self.run_after(obsSubtasks[obsName]) # }}} @@ -878,6 +1175,28 @@ def run_task(self): # {{{ lineWidths.append(1.2) legendText.append(controlRunName) + if varName in ['temperature', 'salinity']: + obsColors = ['b', 'g', 'm'] + for obsName in self.obsSubtasks: + obsFileName = self.obsSubtasks[obsName].outFileName + dsObs = xarray.open_dataset(obsFileName) + endMonthDays = numpy.cumsum(constants.daysInMonth) + midMonthDays = endMonthDays - 0.5*constants.daysInMonth + + obsTime = [] + obsField = [] + for year in range(startYear, endYear+1): + obsTime.append(midMonthDays + 365.*(year-1.)) + obsField.append(dsObs[varName]) + obsTime = numpy.array(obsTime).ravel() + obsField = numpy.array(obsField).ravel() + da = xarray.DataArray(data=obsField, dims='Time', + coords=[('Time', obsTime)]) + fields.append(da) + lineColors.append(obsColors.pop(0)) + lineWidths.append(1.2) + legendText.append(obsName) + if is3d: if not plotControl or numpy.all(zbounds == zboundsRef): title = '{} ({} < z < {} m)'.format(title, zbounds[0], From f4ca932c09f6d32a8c254b0bb30d64c08ef3531b Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 13 Sep 2020 14:55:24 -0700 Subject: [PATCH 26/58] Switch ocean region time series obs to ANN mean --- .../ocean/time_series_ocean_regions.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git 
a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index c09dc57d5..0d2b2e946 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -102,7 +102,8 @@ def __init__(self, config, regionMasksTask, controlConfig=None): 'SVar': 'salinity', 'volVar': 'volume', 'zVar': 'z', - 'tDim': 'Time'}, + 'tDim': 'Time', + 'legend': 'SOSE 2005-2010 ANN mean'}, 'WOA18': { 'suffix': 'WOA18', 'gridName': 'Global_0.25x0.25degree', @@ -116,7 +117,8 @@ def __init__(self, config, regionMasksTask, controlConfig=None): 'SVar': 's_an', 'volVar': 'volume', 'zVar': 'depth', - 'tDim': 'month'}} + 'tDim': 'month', + 'legend': 'WOA18 1955-2017 ANN mean'}} for regionGroup in regionGroups: sectionSuffix = regionGroup[0].upper() + \ @@ -1177,25 +1179,27 @@ def run_task(self): # {{{ if varName in ['temperature', 'salinity']: obsColors = ['b', 'g', 'm'] + daysInMonth = constants.daysInMonth for obsName in self.obsSubtasks: obsFileName = self.obsSubtasks[obsName].outFileName + obsDict = self.obsSubtasks[obsName].obsDict dsObs = xarray.open_dataset(obsFileName) - endMonthDays = numpy.cumsum(constants.daysInMonth) - midMonthDays = endMonthDays - 0.5*constants.daysInMonth + endMonthDays = numpy.cumsum(daysInMonth) + midMonthDays = endMonthDays - 0.5*daysInMonth obsTime = [] - obsField = [] + fieldMean = \ + numpy.sum(dsObs[varName].values*daysInMonth)/365. 
for year in range(startYear, endYear+1): obsTime.append(midMonthDays + 365.*(year-1.)) - obsField.append(dsObs[varName]) obsTime = numpy.array(obsTime).ravel() - obsField = numpy.array(obsField).ravel() + obsField = fieldMean*numpy.ones(obsTime.shape) da = xarray.DataArray(data=obsField, dims='Time', coords=[('Time', obsTime)]) fields.append(da) lineColors.append(obsColors.pop(0)) lineWidths.append(1.2) - legendText.append(obsName) + legendText.append(obsDict['legend']) if is3d: if not plotControl or numpy.all(zbounds == zboundsRef): From 9a49215fb61454177752ecc043affe9512ba2002 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 13 Sep 2020 11:07:31 +0200 Subject: [PATCH 27/58] Change axis labels and default colormap for T-S diagrams There is now a single x and y axis label for all panels. The color map no longer fades to white at low density. --- mpas_analysis/config.default | 2 +- mpas_analysis/ocean/regional_ts_diagrams.py | 24 +++++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 78d89c5b4..5fbc4f0d9 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -1365,7 +1365,7 @@ Sbins = numpy.linspace(33.8, 34.8, 1001) rhoInterval = 0.1 # The color map for depth or volume -colormap = white_cmo_deep +colormap = cmo.deep # The following is more appropriate if diagramType == 'scatter' # colormap = cmo.deep_r # the type of norm used in the colormap {'linear', 'log'} diff --git a/mpas_analysis/ocean/regional_ts_diagrams.py b/mpas_analysis/ocean/regional_ts_diagrams.py index 693aea586..c3897fa0a 100644 --- a/mpas_analysis/ocean/regional_ts_diagrams.py +++ b/mpas_analysis/ocean/regional_ts_diagrams.py @@ -1102,6 +1102,30 @@ def run_task(self): # {{{ inset = add_inset(fig, fc, width=1.5, height=1.5) + # add an empty plot covering the subplots to give common axis labels + pos0 = axarray[0, 0].get_position() + pos1 = axarray[-1, -1].get_position() + 
pos_common = [pos0.x0, pos1.y0, pos1.x1-pos0.x0, pos0.y1-pos1.y0] + print(pos_common) + common_ax = fig.add_axes(pos_common, zorder=-2) + common_ax.spines['top'].set_color('none') + common_ax.spines['bottom'].set_color('none') + common_ax.spines['left'].set_color('none') + common_ax.spines['right'].set_color('none') + common_ax.tick_params(labelcolor='w', top=False, bottom=False, + left=False, right=False) + + common_ax.set_xlabel('Salinity (PSU)', **axis_font) + common_ax.set_ylabel(r'Potential temperature ($^\circ$C)', **axis_font) + + # turn off labels for individual plots (just used for spacing) + for index in range(len(axisIndices)): + row = nRows-1 - index//nCols + col = numpy.mod(index, nCols) + ax = axarray[row, col] + ax.set_xlabel('') + ax.set_ylabel('') + # move the color bar down a little ot avoid the inset pos0 = inset.get_position() pos1 = axarray[-1, -1].get_position() From d95c0db0e3f7409044df2897ec1fdb3974e88bd4 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 21 Nov 2020 15:26:19 -0600 Subject: [PATCH 28/58] Coarsen the bins for the T/S diagrams Otherwise, they are not dense enough to be well sampled at lower resolution. 
--- mpas_analysis/config.default | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 5fbc4f0d9..d71bc5567 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -1358,8 +1358,8 @@ diagramType = volumetric # if diagramType == 'volumetric', the bin boundaries for T and S # if diagramType == 'scatter', only the min and max are important (and the # bins are only used for computing neutral density contours) -Tbins = numpy.linspace(-2.5, 4, 651) -Sbins = numpy.linspace(33.8, 34.8, 1001) +Tbins = numpy.linspace(-2.5, 4, 131) +Sbins = numpy.linspace(33.8, 34.8, 201) # density contour interval rhoInterval = 0.1 From 8eea999e4171f8a076ac55fe37f4cae8dc195476 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 21 Nov 2020 15:35:49 -0600 Subject: [PATCH 29/58] Add support for user-specified min/max vol in TS diagrams --- mpas_analysis/config.default | 5 +++++ mpas_analysis/ocean/regional_ts_diagrams.py | 13 ++++++++++--- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index d71bc5567..7e332ca1a 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -1377,6 +1377,11 @@ normType = log # zmin = -1000 # zmax = -400 +# the minimum and maximum volume for the colorbar, default is the minimum and +# maximum over the mode output +# volMin = 3e9 +# volMax = 1e12 + # Obserational data sets to compare against obs = ['SOSE', 'WOA18'] diff --git a/mpas_analysis/ocean/regional_ts_diagrams.py b/mpas_analysis/ocean/regional_ts_diagrams.py index c3897fa0a..3406097d2 100644 --- a/mpas_analysis/ocean/regional_ts_diagrams.py +++ b/mpas_analysis/ocean/regional_ts_diagrams.py @@ -1025,8 +1025,14 @@ def run_task(self): # {{{ raise ValueError('Unexpected diagramType {}'.format(diagramType)) lastPanel = None - volMinMpas = None - volMaxMpas = None + if config.has_option(sectionName, 
'volMin'): + volMinMpas = config.getfloat(sectionName, 'volMin') + else: + volMinMpas = None + if config.has_option(sectionName, 'volMax'): + volMaxMpas = config.getfloat(sectionName, 'volMax') + else: + volMaxMpas = None for index in range(len(axisIndices)): panelIndex = axisIndices[index] @@ -1052,8 +1058,9 @@ def run_task(self): # {{{ lastPanel, volMin, volMax = \ self._plot_volumetric_panel(T, S, volume) - if index == 0: + if volMinMpas is None: volMinMpas = volMin + if volMaxMpas is None: volMaxMpas = volMax if normType == 'linear': norm = colors.Normalize(vmin=0., vmax=volMaxMpas) From 66582b4fa2854f2c31cfe1699fc70d6f59dd8037 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 22 Nov 2020 06:58:56 -0600 Subject: [PATCH 30/58] Remove melt rate plots from QU480 test It doesn't have ice-shelf cavities so melt tests will fail. --- configs/anvil/test_suite/QU480.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/configs/anvil/test_suite/QU480.cfg b/configs/anvil/test_suite/QU480.cfg index b35d87eea..8557328c0 100644 --- a/configs/anvil/test_suite/QU480.cfg +++ b/configs/anvil/test_suite/QU480.cfg @@ -100,7 +100,8 @@ htmlSubdirectory = /lcrc/group/acme/public_html/diagnostic_output/ac.xylar/analy # all,no_ocean,all_timeSeries # All tasks with tag "landIceCavities" are disabled because this run did not # include land-ice cavities. -generate = ['all', 'no_BGC', 'no_icebergs', 'no_index', 'no_eke'] +generate = ['all', 'no_BGC', 'no_icebergs', 'no_index', 'no_eke', + 'no_landIceCavities'] [climatology] ## options related to producing climatologies, typically to compare against From 6a1961c235a3e12fade46c9717667cd26c55cfc2 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 22 Nov 2020 07:12:35 -0600 Subject: [PATCH 31/58] Add a script for cleaning up test output This should be run before rerunning the test suite. 
--- configs/anvil/test_suite/clean_suite.bash | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100755 configs/anvil/test_suite/clean_suite.bash diff --git a/configs/anvil/test_suite/clean_suite.bash b/configs/anvil/test_suite/clean_suite.bash new file mode 100755 index 000000000..4faccc6a9 --- /dev/null +++ b/configs/anvil/test_suite/clean_suite.bash @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e + +branch=$(git symbolic-ref --short HEAD) + +rm -rf anvil_test_suite +rm -rf /lcrc/group/acme/ac.xylar/analysis_testing/${branch} +rm -rf /lcrc/group/acme/public_html/diagnostic_output/ac.xylar/analysis_testing/${branch} From c3512c6fc3e2b08ed673bde03ff650411bfee550 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 22 Nov 2020 08:06:27 -0600 Subject: [PATCH 32/58] Add anvil_test_suite to .gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 6aa55d687..c581671df 100644 --- a/.gitignore +++ b/.gitignore @@ -93,3 +93,6 @@ ENV/ .ropeproject .DS_Store + +# Anvil test suite +/anvil_test_suite/ From 6f98d0f95ba80966c7381cd519cdbd57e3bd884c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 22 Nov 2020 08:10:26 -0600 Subject: [PATCH 33/58] Add missing mkdir for ocean region obs Also, rename mask file for better readability. 
--- mpas_analysis/ocean/time_series_ocean_regions.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index 0d2b2e946..a1087e42a 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -295,8 +295,8 @@ def run_task(self): # {{{ except OSError: pass - outFileName = '{}/depthMasks{}.nc'.format(outputDirectory, - timeSeriesName) + outFileName = '{}/depthMasks_{}.nc'.format(outputDirectory, + timeSeriesName) if os.path.exists(outFileName): self.logger.info(' Mask file exists -- Done.') @@ -563,8 +563,8 @@ def run_task(self): # {{{ self.logger.info(' Time series exists -- Done.') return - regionMaskFileName = '{}/depthMasks{}.nc'.format(outputDirectory, - timeSeriesName) + regionMaskFileName = '{}/depthMasks_{}.nc'.format(outputDirectory, + timeSeriesName) dsRegionMask = xarray.open_dataset(regionMaskFileName) nRegions = dsRegionMask.sizes['nRegions'] areaCell = dsRegionMask.areaCell @@ -835,6 +835,11 @@ def run_task(self): # {{{ 'timeseriesSubdirectory'), timeSeriesName) + try: + os.makedirs(outputDirectory) + except OSError: + pass + outFileName = '{}/TS_{}_{}.nc'.format( outputDirectory, obsDict['suffix'], self.prefix) From dc19a0da14421a557e13f929ad8d756ed68446f3 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 22 Nov 2020 08:14:20 -0600 Subject: [PATCH 34/58] Give an explicit end time for time series The automatic value doesn't work for main vs. 
ctrl --- configs/anvil/test_suite/ctrl.cfg | 4 ++-- configs/anvil/test_suite/main.cfg | 4 ++-- configs/anvil/test_suite/main_vs_ctrl.cfg | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/configs/anvil/test_suite/ctrl.cfg b/configs/anvil/test_suite/ctrl.cfg index feee053d6..9443453a6 100644 --- a/configs/anvil/test_suite/ctrl.cfg +++ b/configs/anvil/test_suite/ctrl.cfg @@ -122,7 +122,7 @@ endYear = 8 # a "main vs. control" analysis run, the range of years must be valid and # cannot include "end" because the original data may not be available. startYear = 1 -endYear = end +endYear = 8 [index] ## options related to producing nino index. @@ -134,7 +134,7 @@ endYear = end # a "main vs. control" analysis run, the range of years must be valid and # cannot include "end" because the original data may not be available. startYear = 1 -endYear = end +endYear = 8 [streamfunctionMOC] ## options related to plotting the streamfunction of the meridional overturning diff --git a/configs/anvil/test_suite/main.cfg b/configs/anvil/test_suite/main.cfg index 6a058d9ad..86d5923d0 100644 --- a/configs/anvil/test_suite/main.cfg +++ b/configs/anvil/test_suite/main.cfg @@ -122,7 +122,7 @@ endYear = 8 # a "main vs. control" analysis run, the range of years must be valid and # cannot include "end" because the original data may not be available. startYear = 1 -endYear = end +endYear = 8 [index] ## options related to producing nino index. @@ -134,7 +134,7 @@ endYear = end # a "main vs. control" analysis run, the range of years must be valid and # cannot include "end" because the original data may not be available. 
startYear = 1 -endYear = end +endYear = 8 [streamfunctionMOC] ## options related to plotting the streamfunction of the meridional overturning diff --git a/configs/anvil/test_suite/main_vs_ctrl.cfg b/configs/anvil/test_suite/main_vs_ctrl.cfg index 8325e1361..1c7ca0c7d 100644 --- a/configs/anvil/test_suite/main_vs_ctrl.cfg +++ b/configs/anvil/test_suite/main_vs_ctrl.cfg @@ -122,7 +122,7 @@ endYear = 8 # a "main vs. control" analysis run, the range of years must be valid and # cannot include "end" because the original data may not be available. startYear = 1 -endYear = end +endYear = 8 [index] ## options related to producing nino index. @@ -134,7 +134,7 @@ endYear = end # a "main vs. control" analysis run, the range of years must be valid and # cannot include "end" because the original data may not be available. startYear = 1 -endYear = end +endYear = 8 [streamfunctionMOC] ## options related to plotting the streamfunction of the meridional overturning From 5c9cd402c373a57db7392d71a86cfaad874941c4 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 22 Nov 2020 08:23:13 -0600 Subject: [PATCH 35/58] Fix stream name for min/max stream --- mpas_analysis/shared/climatology/mpas_climatology_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mpas_analysis/shared/climatology/mpas_climatology_task.py b/mpas_analysis/shared/climatology/mpas_climatology_task.py index b9ecfbc54..fbe1dcc6f 100644 --- a/mpas_analysis/shared/climatology/mpas_climatology_task.py +++ b/mpas_analysis/shared/climatology/mpas_climatology_task.py @@ -698,7 +698,7 @@ def _preprocess(ds): fileNames = sorted(parentTask.inputFiles) years, months = get_files_year_month( fileNames, self.historyStreams, - 'timeSeriesStatsMonthlyOutput') + parentTask.streamName) with xarray.open_mfdataset(parentTask.inputFiles, combine='nested', From e31c9c76d297b128a1681b7b4d1692ca29aa298b Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 22 Nov 2020 11:08:38 -0600 Subject: [PATCH 36/58] 
Make setup verbose in tests --- configs/anvil/test_suite/job_script.bash | 2 +- configs/anvil/test_suite/job_script_no_polar_regions.bash | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/configs/anvil/test_suite/job_script.bash b/configs/anvil/test_suite/job_script.bash index 5c10916ee..a10698a50 100644 --- a/configs/anvil/test_suite/job_script.bash +++ b/configs/anvil/test_suite/job_script.bash @@ -20,6 +20,6 @@ echo configs: ../configs/polarRegions.conf main.cfg mpas_analysis --list mpas_analysis --plot_colormaps mpas_analysis --setup_only ../configs/polarRegions.conf main.cfg -mpas_analysis --purge ../configs/polarRegions.conf main.cfg +mpas_analysis --purge ../configs/polarRegions.conf main.cfg --verbose mpas_analysis --html_only ../configs/polarRegions.conf main.cfg diff --git a/configs/anvil/test_suite/job_script_no_polar_regions.bash b/configs/anvil/test_suite/job_script_no_polar_regions.bash index 70e4394b8..3e4190d49 100644 --- a/configs/anvil/test_suite/job_script_no_polar_regions.bash +++ b/configs/anvil/test_suite/job_script_no_polar_regions.bash @@ -17,5 +17,5 @@ export HDF5_USE_FILE_LOCKING=FALSE echo env: test_env echo configs: no_polar_regions.cfg -srun -N 1 -n 1 python -m mpas_analysis no_polar_regions.cfg +srun -N 1 -n 1 python -m mpas_analysis no_polar_regions.cfg --verbose From 8b0f3fabd06200a509139aef32d49bb55a20be7e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 22 Nov 2020 05:38:47 -0600 Subject: [PATCH 37/58] Reduce the range for melt-rate plots to +/- 20 m/yr This helps to show more of the important detail, particularly for lower melt rates. --- configs/polarRegions.conf | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/configs/polarRegions.conf b/configs/polarRegions.conf index d85093748..dcbde8166 100644 --- a/configs/polarRegions.conf +++ b/configs/polarRegions.conf @@ -255,3 +255,17 @@ movingAverageMonths = 12 # limits on depth, the full range by default yLim = [-600., -5.] 
+ + +[climatologyMapAntarcticMelt] +## options related to plotting horizontally regridded maps of Antarctic +## sub-ice-shelf melt rates against control model results and observations + +# A dictionary with keywords for the norm +normArgsResult = {'linthresh': 1., 'linscale': 0.5, 'vmin': -20., 'vmax': 20.} +colorbarTicksResult = [-20., -10., -5., -2., -1., 0., 1., 2., 5., 10., 20.] + +# A dictionary with keywords for the norm +normArgsDifference = {'linthresh': 1., 'linscale': 0.5, 'vmin': -20., + 'vmax': 20.} +colorbarTicksDifference = [-20., -10., -5., -2., -1., 0., 1., 2., 5., 10., 20.] From b10291770f6bbc6c91521e0c3208a1504482a17e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 18 Nov 2020 08:57:15 +0100 Subject: [PATCH 38/58] Add Rignot obs for Antarctic, FRIS, Ross --- mpas_analysis/ocean/time_series_antarctic_melt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mpas_analysis/ocean/time_series_antarctic_melt.py b/mpas_analysis/ocean/time_series_antarctic_melt.py index d44a50a58..16d4be4e3 100644 --- a/mpas_analysis/ocean/time_series_antarctic_melt.py +++ b/mpas_analysis/ocean/time_series_antarctic_melt.py @@ -564,9 +564,9 @@ def run_task(self): # {{{ observationsDirectory = build_obs_path(config, 'ocean', 'meltSubdirectory') obsFileNameDict = {'Rignot et al. (2013)': - 'Rignot_2013_melt_rates_20200623.csv', + 'Rignot_2013_melt_rates_20201117.csv', 'Rignot et al. (2013) SS': - 'Rignot_2013_melt_rates_SS_20200623.csv'} + 'Rignot_2013_melt_rates_SS_20201117.csv'} obsDict = {} # dict for storing dict of obs data for obsName in obsFileNameDict: From e3efd0a8efa6714ace4afce314521fb2eadb6349 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 18 Nov 2020 08:56:26 +0100 Subject: [PATCH 39/58] Use cartopy coastline by default in polar projection plots The user can optionally switch back to the old behavior of using the MPAS coastline. Lat/lon contours have been added (partly to cover up a seam in the coastline geometry). 
The default color for continents is now the lighter gray used for polar projections, and this also applies to global plots. --- mpas_analysis/config.default | 7 ++ .../ocean/plot_climatology_map_subtask.py | 18 +++- mpas_analysis/shared/plot/climatology_map.py | 86 +++++++++++++------ 3 files changed, 79 insertions(+), 32 deletions(-) diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 7e332ca1a..af713a0c3 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -311,6 +311,13 @@ threePanelAxisFontSize = 12 dpi = 200 +[polarProjection] +## options related to polar-projection plots + +# whether to use the cartopy coastline (as opposed to the model coastline) +useCartopyCoastline = True + + [html] ## options related to generating a webpage to display the analysis diff --git a/mpas_analysis/ocean/plot_climatology_map_subtask.py b/mpas_analysis/ocean/plot_climatology_map_subtask.py index 6dbd28bcb..57d8fa366 100644 --- a/mpas_analysis/ocean/plot_climatology_map_subtask.py +++ b/mpas_analysis/ocean/plot_climatology_map_subtask.py @@ -30,9 +30,10 @@ from mpas_analysis.shared.html import write_image_xml - from mpas_analysis.shared.climatology import \ get_remapped_mpas_climatology_file_name +from mpas_analysis.shared.climatology.comparison_descriptors import \ + get_comparison_descriptor from mpas_analysis.ocean.utility import nans_to_numpy_mask @@ -548,8 +549,10 @@ def _plot_polar(self, remappedModelClimatology, bias = modelOutput - refOutput - x = interp_extrap_corner(remappedModelClimatology['x'].values) - y = interp_extrap_corner(remappedModelClimatology['y'].values) + comparisonDescriptor = get_comparison_descriptor( + config, comparisonGridName) + x = comparisonDescriptor.xCorner + y = comparisonDescriptor.yCorner filePrefix = self.filePrefix outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) @@ -557,6 +560,12 @@ def _plot_polar(self, remappedModelClimatology, self.fieldNameInTitle, season, 
self.startYear, self.endYear) + if self.comparisonGridName == 'antarctic': + hemisphere = 'south' + else: + # arctic + hemisphere = 'north' + plot_polar_projection_comparison( config, x, @@ -571,7 +580,8 @@ def _plot_polar(self, remappedModelClimatology, modelTitle='{}'.format(mainRunName), refTitle=self.refTitleLabel, diffTitle=self.diffTitleLabel, - cbarlabel=self.unitsLabel) + cbarlabel=self.unitsLabel, + hemisphere=hemisphere) upperGridName = comparisonGridName[0].upper() + comparisonGridName[1:] caption = '{} {}'.format(season, self.imageCaption) diff --git a/mpas_analysis/shared/plot/climatology_map.py b/mpas_analysis/shared/plot/climatology_map.py index faeace0ed..08ce9a316 100644 --- a/mpas_analysis/shared/plot/climatology_map.py +++ b/mpas_analysis/shared/plot/climatology_map.py @@ -419,7 +419,8 @@ def plot_polar_projection_comparison( dpi=None, lineWidth=0.5, lineColor='black', - vertical=False): + vertical=False, + hemisphere='north'): """ Plots a data set as a longitude/latitude map. 
@@ -490,27 +491,43 @@ def plot_polar_projection_comparison( def plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, lineWidth, lineColor): - plt.title(title, y=1.06, **plottitle_font) + ax.set_title(title, y=1.06, **plottitle_font) + + ax.set_extent(extent, crs=projection) + + gl = ax.gridlines(crs=cartopy.crs.PlateCarree(), color='k', + linestyle=':', zorder=5, draw_labels=True) + gl.xlocator = mticker.FixedLocator(np.arange(-180., 181., 60.)) + gl.ylocator = mticker.FixedLocator(np.arange(-80., 81., 10.)) + gl.n_steps = 100 + gl.right_labels = False + gl.xformatter = cartopy.mpl.gridliner.LONGITUDE_FORMATTER + gl.yformatter = cartopy.mpl.gridliner.LATITUDE_FORMATTER if levels is None: - plotHandle = plt.pcolormesh(x, y, array, cmap=colormap, norm=norm) + plotHandle = ax.pcolormesh(x, y, array, cmap=colormap, norm=norm) else: - plotHandle = plt.contourf(xCenter, yCenter, array, cmap=colormap, - norm=norm, levels=levels, extend='both') + plotHandle = ax.contourf(xCenter, yCenter, array, cmap=colormap, + norm=norm, levels=levels, extend='both') - plt.pcolormesh(x, y, landMask, cmap=landColorMap) - plt.contour(xCenter, yCenter, landMask.mask, (0.5,), colors='k', - linewidths=0.5) + if useCartopyCoastline: + _add_land_lakes_coastline(ax, ice_shelves=False) + else: + # add the model coastline + plt.pcolormesh(x, y, landMask, cmap=landColorMap) + plt.contour(xCenter, yCenter, landMask.mask, (0.5,), colors='k', + linewidths=0.5) if contours is not None: matplotlib.rcParams['contour.negative_linestyle'] = 'solid' - plt.contour(x, y, array, levels=contours, colors=lineColor, - linewidths=lineWidth) + ax.contour(x, y, array, levels=contours, colors=lineColor, + linewidths=lineWidth) # create an axes on the right side of ax. The width of cax will be 5% # of ax and the padding between cax and ax will be fixed at 0.05 inch. 
divider = make_axes_locatable(ax) - cax = divider.append_axes("right", size="5%", pad=0.05) + cax = divider.append_axes("right", size="5%", pad=0.05, + axes_class=plt.Axes) cbar = plt.colorbar(plotHandle, cax=cax) cbar.set_label(cbarlabel) @@ -518,10 +535,8 @@ def plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, cbar.set_ticks(ticks) cbar.set_ticklabels(['{}'.format(tick) for tick in ticks]) - ax.axis('off') - ax.set_aspect('equal') - ax.autoscale(tight=True) - + useCartopyCoastline = config.getboolean('polarProjection', + 'useCartopyCoastline') # set up figure if dpi is None: dpi = config.getint('plot', 'dpi') @@ -544,7 +559,7 @@ def plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, fig = plt.figure(figsize=figsize, dpi=dpi) - if (title is not None): + if title is not None: if titleFontSize is None: titleFontSize = config.get('plot', 'titleFontSize') title_font = {'size': titleFontSize, @@ -556,42 +571,57 @@ def plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, 'threePanelPlotTitleFontSize')} # set up land colormap - colorList = [(0.8, 0.8, 0.8), (0.8, 0.8, 0.8)] - landColorMap = cols.LinearSegmentedColormap.from_list('land', colorList) + if not useCartopyCoastline: + colorList = [(0.8, 0.8, 0.8), (0.8, 0.8, 0.8)] + landColorMap = cols.LinearSegmentedColormap.from_list('land', colorList) # locations of centers for contour plots xCenter = 0.5 * (x[1:] + x[0:-1]) yCenter = 0.5 * (y[1:] + y[0:-1]) - ax = plt.subplot(subplots[0]) + if hemisphere == 'north': + projection = cartopy.crs.Stereographic( + central_latitude=90., central_longitude=0.0, + true_scale_latitude=75.0) + elif hemisphere == 'south': + projection = cartopy.crs.Stereographic( + central_latitude=-90., central_longitude=0.0, + true_scale_latitude=-71.0) + else: + raise ValueError('Unexpected hemisphere {}'.format( + hemisphere)) + extent = [x[0], x[-1], y[0], y[-1]] + + ax = plt.subplot(subplots[0], projection=projection) plot_panel(ax, 
modelTitle, modelArray, **dictModelRef) if refArray is not None: - ax = plt.subplot(subplots[1]) + ax = plt.subplot(subplots[1], projection=projection) plot_panel(ax, refTitle, refArray, **dictModelRef) - ax = plt.subplot(subplots[2]) + ax = plt.subplot(subplots[2], projection=projection) plot_panel(ax, diffTitle, diffArray, **dictDiff) - if (fileout is not None): + if fileout is not None: plt.savefig(fileout, dpi=dpi, bbox_inches='tight', pad_inches=0.1) plt.close() -def _add_land_lakes_coastline(ax): +def _add_land_lakes_coastline(ax, ice_shelves=True): land_50m = cartopy.feature.NaturalEarthFeature( 'physical', 'land', '50m', edgecolor='k', - facecolor='gray', linewidth=0.5) - ice_50m = cartopy.feature.NaturalEarthFeature( - 'physical', 'antarctic_ice_shelves_polys', '50m', edgecolor='k', - facecolor='lightgray', linewidth=0.5) + facecolor='#cccccc', linewidth=0.5) lakes_50m = cartopy.feature.NaturalEarthFeature( 'physical', 'lakes', '50m', edgecolor='k', facecolor='white', linewidth=0.5) ax.add_feature(land_50m, zorder=2) - ax.add_feature(ice_50m, zorder=3) + if ice_shelves: + ice_50m = cartopy.feature.NaturalEarthFeature( + 'physical', 'antarctic_ice_shelves_polys', '50m', edgecolor='k', + facecolor='lightgray', linewidth=0.5) + ax.add_feature(ice_50m, zorder=3) ax.add_feature(lakes_50m, zorder=4) # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python From 9add3ea97febe01d7dea82ebfcf78715ad60cf19 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 22 Nov 2020 10:40:34 -0600 Subject: [PATCH 40/58] Move the legend to the lower left for regional profiles It is defaulting to the upper right in some cases, and therefore getting covered by the inset. 
--- mpas_analysis/ocean/ocean_regional_profiles.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mpas_analysis/ocean/ocean_regional_profiles.py b/mpas_analysis/ocean/ocean_regional_profiles.py index 7d9d244ea..87083b9d1 100644 --- a/mpas_analysis/ocean/ocean_regional_profiles.py +++ b/mpas_analysis/ocean/ocean_regional_profiles.py @@ -851,7 +851,7 @@ def plot(self, zArrays, fieldArrays, errArrays, lineColors, lineWidths, facecolor=color, alpha=0.2) if plotLegend and len(zArrays) > 1: - plt.legend() + plt.legend(loc='lower left') axis_font = {'size': config.get('plot', 'axisFontSize')} title_font = {'size': config.get('plot', 'titleFontSize'), From 36d6abf04627e4433e02993fbe6bcec5ab37aff4 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 25 Nov 2020 01:10:26 -0800 Subject: [PATCH 41/58] Send melt tables to "tables" directory Also, fix selecting of single Time level. --- mpas_analysis/config.default | 1 + .../ocean/climatology_map_antarctic_melt.py | 24 +++++++++++-------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 7e332ca1a..53f6cb087 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -160,6 +160,7 @@ maskSubdirectory = masks # provide an absolute path to put HTML in an alternative location (e.g. a web # portal) htmlSubdirectory = html +tablesSubdirectory = tables # a list of analyses to generate. 
Valid names can be seen by running: # mpas_analysis --list diff --git a/mpas_analysis/ocean/climatology_map_antarctic_melt.py b/mpas_analysis/ocean/climatology_map_antarctic_melt.py index 2a1573589..0f2df3822 100644 --- a/mpas_analysis/ocean/climatology_map_antarctic_melt.py +++ b/mpas_analysis/ocean/climatology_map_antarctic_melt.py @@ -21,7 +21,7 @@ from mpas_analysis.shared import AnalysisTask from mpas_analysis.shared.io.utility import build_obs_path, get_region_mask, \ - decode_strings + decode_strings, build_config_full_path from mpas_analysis.shared.io import write_netcdf from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \ @@ -417,7 +417,7 @@ def run_task(self): # {{{ dsIn = xr.open_dataset(inFileName) freshwaterFlux = dsIn[mpasFieldName] if 'Time' in freshwaterFlux.dims: - freshwaterFlux.isel(Time=0) + freshwaterFlux = freshwaterFlux.isel(Time=0) regionMaskFileName = self.masksSubtask.maskFileName @@ -497,10 +497,16 @@ def run_task(self): # {{{ regionNames = decode_strings(ds.regionNames) - tableFileName = get_masked_mpas_climatology_file_name( - config, self.season, self.componentName, - climatologyName='antarcticMeltRateTable') - tableFileName = tableFileName.replace('.nc', '.csv') + outDirectory = '{}/antarcticMelt/'.format( + build_config_full_path(config, 'output', 'tablesSubdirectory')) + + try: + os.makedirs(outDirectory) + except OSError: + pass + + tableFileName = '{}/antarcticMeltRateTable_{}.csv'.format(outDirectory, + self.season) with open(tableFileName, 'w', newline='') as csvfile: writer = csv.DictWriter(csvfile, fieldnames=fieldNames) @@ -515,10 +521,8 @@ def run_task(self): # {{{ '{}'.format(dsControl.meltRates[index].values) writer.writerow(row) - tableFileName = get_masked_mpas_climatology_file_name( - config, self.season, self.componentName, - climatologyName='antarcticMeltFluxTable') - tableFileName = tableFileName.replace('.nc', '.csv') + tableFileName = '{}/antarcticMeltFluxTable_{}.csv'.format(outDirectory, + 
self.season) with open(tableFileName, 'w', newline='') as csvfile: writer = csv.DictWriter(csvfile, fieldnames=fieldNames) From be18f45fd8ea12affc32ae60c52d213172d6e7c3 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 21:44:47 +0100 Subject: [PATCH 42/58] Update region masks to use aggregation --- mpas_analysis/shared/regions/__init__.py | 2 +- .../shared/regions/compute_region_masks.py | 32 ++-- .../regions/compute_region_masks_subtask.py | 159 ++++++++++++++---- 3 files changed, 136 insertions(+), 57 deletions(-) diff --git a/mpas_analysis/shared/regions/__init__.py b/mpas_analysis/shared/regions/__init__.py index dc1d6de27..f3efdbe83 100644 --- a/mpas_analysis/shared/regions/__init__.py +++ b/mpas_analysis/shared/regions/__init__.py @@ -1,5 +1,5 @@ from mpas_analysis.shared.regions.compute_region_masks_subtask \ - import ComputeRegionMasksSubtask, get_feature_list + import ComputeRegionMasksSubtask, get_feature_list, get_region_info from mpas_analysis.shared.regions.compute_region_masks \ import ComputeRegionMasks diff --git a/mpas_analysis/shared/regions/compute_region_masks.py b/mpas_analysis/shared/regions/compute_region_masks.py index e24e84d6d..33cbda8c4 100644 --- a/mpas_analysis/shared/regions/compute_region_masks.py +++ b/mpas_analysis/shared/regions/compute_region_masks.py @@ -13,8 +13,6 @@ from mpas_analysis.shared.regions.compute_region_masks_subtask \ import ComputeRegionMasksSubtask -from mpas_analysis.shared.io.utility import get_region_mask - class ComputeRegionMasks(AnalysisTask): """ @@ -52,21 +50,17 @@ def __init__(self, config, conponentName): self.regionMaskSubtasks = {} - def add_mask_subtask(self, geojsonFileName, outFileSuffix, obsFileName=None, - lonVar='lon', latVar='lat', meshName=None, - useMpasMaskCreator=True): + def add_mask_subtask(self, regionGroup, obsFileName=None, lonVar='lon', + latVar='lat', meshName=None, useMpasMaskCreator=True): """ Construct the analysis task and adds it as a subtask of the 
``parentTask``. Parameters ---------- - geojsonFileName : str - A geojson file, typically from the MPAS ``geometric_features`` - repository, defining the shapes to be masked - - outFileSuffix : str - The suffix for the resulting mask file + regionGroup : str + The name of one of the supported region groups (see + :py:func:`mpas_analysis.shared.regions.get_region_mask()`) obsFileName : str, optional The name of an observations file to create masks for. But default, @@ -94,11 +88,10 @@ def add_mask_subtask(self, geojsonFileName, outFileSuffix, obsFileName=None, if meshName is None: meshName = config.get('input', 'mpasMeshName') - maskFileName = get_region_mask( - config, '{}_{}.nc'.format(meshName, outFileSuffix)) + key = '{} {}'.format(meshName, regionGroup) + + if key not in self.regionMaskSubtasks: - if maskFileName not in self.regionMaskSubtasks: - subtaskName = '{}_{}'.format(meshName, outFileSuffix) subprocessCount = config.getWithDefault('execute', 'parallelTaskCount', default=1) @@ -110,14 +103,13 @@ def add_mask_subtask(self, geojsonFileName, outFileSuffix, obsFileName=None, subprocessCount = 1 maskSubtask = ComputeRegionMasksSubtask( - self, geojsonFileName, outFileSuffix, - featureList=None, subtaskName=subtaskName, + self, regionGroup=regionGroup, meshName=meshName, subprocessCount=subprocessCount, obsFileName=obsFileName, - lonVar=lonVar, latVar=latVar, meshName=meshName, + lonVar=lonVar, latVar=latVar, useMpasMaskCreator=useMpasMaskCreator) self.add_subtask(maskSubtask) - self.regionMaskSubtasks[maskFileName] = maskSubtask + self.regionMaskSubtasks[key] = maskSubtask - return self.regionMaskSubtasks[maskFileName] + return self.regionMaskSubtasks[key] diff --git a/mpas_analysis/shared/regions/compute_region_masks_subtask.py b/mpas_analysis/shared/regions/compute_region_masks_subtask.py index 1230625bf..99b178b70 100644 --- a/mpas_analysis/shared/regions/compute_region_masks_subtask.py +++ b/mpas_analysis/shared/regions/compute_region_masks_subtask.py @@ 
-9,9 +9,6 @@ # distributed with this code, or at # https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/master/LICENSE -from __future__ import absolute_import, division, print_function, \ - unicode_literals - import os import xarray as xr import numpy @@ -20,9 +17,12 @@ from multiprocessing import Pool import progressbar from functools import partial -from geometric_features import read_feature_collection import mpas_tools.conversion +from geometric_features import read_feature_collection, GeometricFeatures +from geometric_features.aggregation.ocean import basins, subbasins, antarctic, \ + ice_shelves + from mpas_analysis.shared.analysis_task import AnalysisTask from mpas_analysis.shared.io.utility import build_config_full_path, \ @@ -30,6 +30,60 @@ from mpas_analysis.shared.io import write_netcdf +def get_region_info(regionGroup, config): + """ + Get a geojson mask file and the appropriate file suffix for the given + region group. + + Parameters + ---------- + regionGroup : str + The name of a region group to get mask features for, one of + 'Antarctic Regions', 'Ocean Basins', 'Ice Shelves', or 'Ocean Subbasins' + + config : mpas_analysis.configuration.MpasAnalysisConfigParser + Configuration options + + Returns + ------- + region : dict + A dictionary of information about the region + + filename : str + The name of a geojson file with mask features + + suffix : str + A suffix to use for mask files created with these features + + """ + + regions = {'Antarctic Regions': {'prefix': 'antarcticRegions', + 'date': '20200621', + 'function': antarctic}, + 'Ocean Basins': {'prefix': 'oceanBasins', + 'date': '20200621', + 'function': basins}, + 'Ice Shelves': {'prefix': 'iceShelves', + 'date': '20200621', + 'function': ice_shelves}, + 'Ocean Subbasins': {'prefix': 'oceanSubbasins', + 'date': '20201123', + 'function': subbasins}} + + if regionGroup not in regions: + raise ValueError('Unknown region group {}'.format(regionGroup)) + + region = regions[regionGroup] + + 
prefix = region['prefix'] + date = region['date'] + + suffix = '{}{}'.format(prefix, date) + filename = get_region_mask(config, '{}.geojson'.format(suffix)) + + return region, filename, suffix + + def get_feature_list(geojsonFileName): """ Builds a list of features found in the geojson file @@ -250,6 +304,8 @@ def compute_region_masks(geojsonFileName, cellPoints, maskFileName, progressbar.Bar(), ' ', progressbar.ETA()] bar = progressbar.ProgressBar(widgets=widgets, max_value=nChunks).start() + else: + bar = None mask = numpy.zeros((nCells,), bool) for iChunk, maskChunk in \ @@ -281,6 +337,14 @@ class ComputeRegionMasksSubtask(AnalysisTask): # {{{ Attributes ---------- + regionGroup : str + The name of one of the supported region groups (see + :py:func:`mpas_analysis.shared.regions.get_region_info()`) + + region : dict + A dictionary of information about the region from + :py:func:`mpas_analysis.shared.regions.get_region_info()` + geojsonFileName : str A geojson file, typically from the MPAS ``geometric_features`` repository, defining the shapes to be masked @@ -294,9 +358,6 @@ class ComputeRegionMasksSubtask(AnalysisTask): # {{{ maskFileName : str The name of the output mask file - maskExists : bool - Whether the mask file already exists - obsFileName : str The name of an observations file to create masks for. 
But default, lon/lat are taken from an MPAS restart file @@ -312,10 +373,9 @@ class ComputeRegionMasksSubtask(AnalysisTask): # {{{ # ------- # Xylar Asay-Davis - def __init__(self, parentTask, geojsonFileName, outFileSuffix, - featureList=None, subtaskName='computeRegionMasks', - subprocessCount=1, obsFileName=None, lonVar='lon', - latVar='lat', meshName=None, useMpasMaskCreator=False): + def __init__(self, parentTask, regionGroup, meshName, subprocessCount=1, + obsFileName=None, lonVar='lon', latVar='lat', + useMpasMaskCreator=False): # {{{ """ Construct the analysis task and adds it as a subtask of the @@ -327,19 +387,14 @@ def __init__(self, parentTask, geojsonFileName, outFileSuffix, The parent task, used to get the ``taskName``, ``config`` and ``componentName`` - geojsonFileName : str - A geojson file, typically from the MPAS ``geometric_features`` - repository, defining the shapes to be masked + regionGroup : str + The name of one of the supported region groups (see + :py:func:`mpas_analysis.shared.regions.get_region_info()`) - outFileSuffix : str - The suffix for the resulting mask file - - featureList : list of str, optional - A list of features to include. Default is all features in all - files + meshName : str + The name of the mesh or grid, used as part of the mask file name. + Default is the MPAS mesh name - subtaskName : str, optional - The name of the subtask subprocessCount : int, optional The number of processes that can be used to make the mask @@ -351,10 +406,6 @@ def __init__(self, parentTask, geojsonFileName, outFileSuffix, lonVar, latVar : str, optional The name of the longitude and latitude variables in ``obsFileName`` - meshName : str, optional - The name of the mesh or grid, used as part of the mask file name. - Default is the MPAS mesh name - useMpasMaskCreator : bool, optional If ``True``, the mask creator from ``mpas_tools`` will be used to create the mask. Otherwise, python code is used. 
@@ -363,6 +414,9 @@ def __init__(self, parentTask, geojsonFileName, outFileSuffix, # ------- # Xylar Asay-Davis + suffix = regionGroup.replace(' ', '') + subtaskName = '{}_{}'.format(meshName, suffix) + # call the constructor from the base class (AnalysisTask) super(ComputeRegionMasksSubtask, self).__init__( config=parentTask.config, @@ -371,9 +425,8 @@ def __init__(self, parentTask, geojsonFileName, outFileSuffix, componentName=parentTask.componentName, tags=[]) - self.geojsonFileName = geojsonFileName - self.outFileSuffix = outFileSuffix - self.featureList = featureList + self.regionGroup = regionGroup + self.featureList = None self.subprocessCount = subprocessCount self.obsFileName = obsFileName @@ -381,6 +434,11 @@ def __init__(self, parentTask, geojsonFileName, outFileSuffix, self.latVar = latVar self.meshName = meshName self.useMpasMaskCreator = useMpasMaskCreator + self.useMpasMesh = self.obsFileName is None + self.maskFileName = None + + self.region, self.geojsonFileName, self.outFileSuffix = get_region_info( + self.regionGroup, self.config) if not self.useMpasMaskCreator: # because this uses a Pool, it cannot be launched as a separate @@ -391,6 +449,39 @@ def __init__(self, parentTask, geojsonFileName, outFileSuffix, # }}} + def make_region_mask(self): + """ + If the geojson mask file has not already been cached in the diagnostics + or custom diagnostic directories, it will be created in the analysis + output's masks directory. + """ + function = self.region['function'] + filename = self.geojsonFileName + if not os.path.exists(filename): + gf = GeometricFeatures() + fc = function(gf) + fc.to_geojson(filename) + + def expand_region_names(self, regionNames): + """ + If ``regionNames`` contains ``'all'``, make sure the geojson file exists + and then return all the region names found in the file. 
+ + Parameters + ---------- + regionNames : list + A list of region names + + Returns + ------- + regionNames : list + A list of region names + """ + if 'all' in regionNames: + self.make_region_mask() + regionNames = get_feature_list(self.geojsonFileName) + return regionNames + def setup_and_check(self): # {{{ """ Perform steps to set up the analysis and check for errors in the setup. @@ -413,7 +504,6 @@ def setup_and_check(self): # {{{ # self.calendar super(ComputeRegionMasksSubtask, self).setup_and_check() - self.useMpasMesh = self.obsFileName is None if self.useMpasMesh: try: self.obsFileName = self.runStreams.readpath('restart')[0] @@ -425,12 +515,6 @@ def setup_and_check(self): # {{{ 'maskSubdirectory') make_directories(maskSubdirectory) - if self.meshName is None: - self.meshName = self.config.get('input', 'mpasMeshName') - - # first, see if we have cached a mask file name in the region masks - # directory - self.maskFileName = get_region_mask( self.config, '{}_{}.nc'.format(self.meshName, self.outFileSuffix)) @@ -460,6 +544,9 @@ def run_task(self): # {{{ if os.path.exists(self.maskFileName): return + # make the geojson file if it doesn't exist + self.make_region_mask() + if self.featureList is None: # get a list of features for use by other tasks (e.g. 
to determine # plot names) From 0df42bf6a329beabcee366b298b7c289146d310c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 22:03:24 +0100 Subject: [PATCH 43/58] Switch transects to using aggregation --- mpas_analysis/shared/transects/__init__.py | 2 +- .../compute_transect_masks_subtask.py | 114 +++++++++++++++--- 2 files changed, 99 insertions(+), 17 deletions(-) diff --git a/mpas_analysis/shared/transects/__init__.py b/mpas_analysis/shared/transects/__init__.py index d9697e3f3..80470320e 100644 --- a/mpas_analysis/shared/transects/__init__.py +++ b/mpas_analysis/shared/transects/__init__.py @@ -1,2 +1,2 @@ from mpas_analysis.shared.transects.compute_transect_masks_subtask \ - import ComputeTransectMasksSubtask + import ComputeTransectMasksSubtask, get_transect_info diff --git a/mpas_analysis/shared/transects/compute_transect_masks_subtask.py b/mpas_analysis/shared/transects/compute_transect_masks_subtask.py index 2554574da..84d98fbbc 100644 --- a/mpas_analysis/shared/transects/compute_transect_masks_subtask.py +++ b/mpas_analysis/shared/transects/compute_transect_masks_subtask.py @@ -17,12 +17,62 @@ from geometric_features import read_feature_collection import mpas_tools.conversion +from geometric_features import GeometricFeatures +from geometric_features.aggregation.ocean import transport + from mpas_analysis.shared.analysis_task import AnalysisTask from mpas_analysis.shared.io.utility import build_config_full_path, \ make_directories, get_region_mask from mpas_analysis.shared.io import write_netcdf +from mpas_analysis.shared.regions import get_feature_list + + +def get_transect_info(transectGroup, config): + """ + Get a geojson mask file and the appropriate file suffix for the given + region group. 
+ + Parameters + ---------- + transectGroup : str + The name of a region group to get mask features for, one of + 'Transport Transects' + + config : mpas_analysis.configuration.MpasAnalysisConfigParser + Configuration options + + Returns + ------- + transect : dict + A dictionary of information about the region + + filename : str + The name of a geojson file with mask features + + suffix : str + A suffix to use for mask files created with these features + + """ + + transects = {'Transport Transects': {'prefix': 'transportTransects', + 'date': '20200621', + 'function': transport}} + + if transectGroup not in transects: + raise ValueError('Unknown transect group {}'.format(transectGroup)) + + transect = transects[transectGroup] + + prefix = transect['prefix'] + date = transect['date'] + + suffix = '{}{}'.format(prefix, date) + filename = get_region_mask(config, '{}.geojson'.format(suffix)) + + return transect, filename, suffix + def compute_mpas_transect_masks(geojsonFileName, meshFileName, maskFileName, logger=None, dir=None): @@ -57,16 +107,12 @@ class ComputeTransectMasksSubtask(AnalysisTask): # {{{ maskFileName : str The name of the output mask file - - meshFileName : str - A mesh file used to create the masks """ # Authors # ------- # Xylar Asay-Davis - def __init__(self, parentTask, geojsonFileName, outFileSuffix, - subtaskName='computeTransectMasks', subprocessCount=1): + def __init__(self, parentTask, transectGroup, subprocessCount=1): # {{{ """ Construct the analysis task and adds it as a subtask of the @@ -78,15 +124,9 @@ def __init__(self, parentTask, geojsonFileName, outFileSuffix, The parent task, used to get the ``taskName``, ``config`` and ``componentName`` - geojsonFileName : str - A geojson file, typically from the MPAS ``geometric_features`` - repository, defining the shapes to be masked - - outFileSuffix : str - The suffix for the resulting mask file - - subtaskName : str, optional - The name of the subtask + transectGroup : str + The name of a 
transect group, see + :py:func:`mpas_analysis.shared.transects.get_transect_info()` subprocessCount : int, optional The number of processes that can be used to make the mask @@ -95,6 +135,8 @@ def __init__(self, parentTask, geojsonFileName, outFileSuffix, # ------- # Xylar Asay-Davis + subtaskName = transectGroup.replace(' ', '') + # call the constructor from the base class (AnalysisTask) super(ComputeTransectMasksSubtask, self).__init__( config=parentTask.config, @@ -103,12 +145,49 @@ def __init__(self, parentTask, geojsonFileName, outFileSuffix, componentName=parentTask.componentName, tags=[]) - self.geojsonFileName = geojsonFileName - self.outFileSuffix = outFileSuffix self.subprocessCount = subprocessCount + self.obsFileName = None + self.maskSubdirectory = None + self.maskFileName = None + self.transectGroup = transectGroup + self.region, self.geojsonFileName, self.outFileSuffix = \ + get_transect_info(self.transectGroup, self.config) # }}} + def make_transect_mask(self): + """ + If the geojson mask file has not already been cached in the diagnostics + or custom diagnostic directories, it will be created in the analysis + output's masks directory. + """ + function = self.region['function'] + filename = self.geojsonFileName + if not os.path.exists(filename): + gf = GeometricFeatures() + fc = function(gf) + fc.to_geojson(filename) + + def expand_transect_names(self, transectNames): + """ + If ``transectNames`` contains ``'all'``, make sure the geojson file + exists and then return all the transect names found in the file. + + Parameters + ---------- + transectNames : list + A list of transect names + + Returns + ------- + transectNames : list + A list of transect names + """ + if 'all' in transectNames: + self.make_transect_mask() + transectNames = get_feature_list(self.geojsonFileName) + return transectNames + def setup_and_check(self): # {{{ """ Perform steps to set up the analysis and check for errors in the setup. 
@@ -173,6 +252,9 @@ def run_task(self): # {{{ if os.path.exists(self.maskFileName): return + # make the geojson file if it doesn't exist + self.make_transect_mask() + compute_mpas_transect_masks( self.geojsonFileName, self.obsFileName, self.maskFileName, logger=self.logger, dir=self.maskSubdirectory) From 29aeffa713b7540105679e72f61d24c7a025e5e3 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 22:04:05 +0100 Subject: [PATCH 44/58] Switch Antarctic melt time series to aggregation --- .../ocean/time_series_antarctic_melt.py | 21 +++++++------------ 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/mpas_analysis/ocean/time_series_antarctic_melt.py b/mpas_analysis/ocean/time_series_antarctic_melt.py index 16d4be4e3..a745df54f 100644 --- a/mpas_analysis/ocean/time_series_antarctic_melt.py +++ b/mpas_analysis/ocean/time_series_antarctic_melt.py @@ -29,12 +29,10 @@ from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf from mpas_analysis.shared.io.utility import build_config_full_path, \ - make_directories, build_obs_path, decode_strings, get_region_mask + make_directories, build_obs_path, decode_strings from mpas_analysis.shared.html import write_image_xml -from mpas_analysis.shared.regions import get_feature_list - class TimeSeriesAntarcticMelt(AnalysisTask): # {{{ """ @@ -76,16 +74,13 @@ def __init__(self, config, mpasTimeSeriesTask, regionMasksTask, componentName='ocean', tags=['timeSeries', 'melt', 'landIceCavities', 'antarctic']) - self.iceShelfMasksFile = get_region_mask(config, - 'iceShelves20200621.geojson') - + regionGroup = 'Ice Shelves' + masksSubtask = regionMasksTask.add_mask_subtask(regionGroup=regionGroup) iceShelvesToPlot = config.getExpression('timeSeriesAntarcticMelt', 'iceShelvesToPlot') - if 'all' in iceShelvesToPlot: - iceShelvesToPlot = get_feature_list(self.iceShelfMasksFile) + self.iceShelfMasksFile = masksSubtask.geojsonFileName - masksSubtask = regionMasksTask.add_mask_subtask( - 
self.iceShelfMasksFile, outFileSuffix='iceShelves20200621') + iceShelvesToPlot = masksSubtask.expand_region_names(iceShelvesToPlot) startYear = config.getint('timeSeries', 'startYear') endYear = config.get('timeSeries', 'endYear') @@ -154,7 +149,7 @@ def __init__(self, parentTask, startYear, endYear, mpasTimeSeriesTask, Parameters ---------- - parentTask : ``AnalysisTask`` + parentTask : TimeSeriesAntarcticMelt The parent task, used to get the ``taskName``, ``config`` and ``componentName`` @@ -382,7 +377,7 @@ def __init__(self, parentTask, startYears, endYears): # {{{ Parameters ---------- - parentTask : ``TimeSeriesOceanRegions`` + parentTask : TimeSeriesAntarcticMelt The main task of which this is a subtask startYears, endYears : list @@ -465,7 +460,7 @@ def __init__(self, parentTask, iceShelf, regionIndex, controlConfig): Parameters ---------- - parentTask : ``AnalysisTask`` + parentTask : TimeSeriesAntarcticMelt The parent task, used to get the ``taskName``, ``config`` and ``componentName`` From 2814edf7237a563cc865180e6eb9340920c35f36 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 22:04:25 +0100 Subject: [PATCH 45/58] Switch ocean regions to aggregation --- .../ocean/time_series_ocean_regions.py | 56 ++++++++----------- 1 file changed, 22 insertions(+), 34 deletions(-) diff --git a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index a1087e42a..a1f859389 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -30,8 +30,6 @@ from mpas_analysis.shared.html import write_image_xml -from mpas_analysis.shared.regions import get_feature_list - from mpas_analysis.ocean.utility import compute_zmid from mpas_analysis.shared.constants import constants @@ -125,19 +123,12 @@ def __init__(self, config, regionMasksTask, controlConfig=None): regionGroup[1:].replace(' ', '') sectionName = 'timeSeries{}'.format(sectionSuffix) - 
regionMaskSuffix = config.getExpression(sectionName, - 'regionMaskSuffix') - - regionMaskFile = get_region_mask( - config, '{}.geojson'.format(regionMaskSuffix)) - regionNames = config.getExpression(sectionName, 'regionNames') - if 'all' in regionNames and os.path.exists(regionMaskFile): - regionNames = get_feature_list(regionMaskFile) - masksSubtask = regionMasksTask.add_mask_subtask( - regionMaskFile, outFileSuffix=regionMaskSuffix) + regionGroup=regionGroup) + + regionNames = masksSubtask.expand_region_names(regionNames) years = list(range(startYear, endYear + 1)) @@ -150,8 +141,8 @@ def __init__(self, config, regionMasksTask, controlConfig=None): config, component=self.componentName, relativePath=localObsDict['gridFileName']) obsMasksSubtask = regionMasksTask.add_mask_subtask( - regionMaskFile, outFileSuffix=regionMaskSuffix, - obsFileName=obsFileName, lonVar=localObsDict['lonVar'], + regionGroup=regionGroup, obsFileName=obsFileName, + lonVar=localObsDict['lonVar'], latVar=localObsDict['latVar'], meshName=localObsDict['gridName']) @@ -195,13 +186,13 @@ def __init__(self, config, regionMasksTask, controlConfig=None): localObsDict = dict(groupObsDicts[obsName]) obsSubtask = ComputeObsRegionalTimeSeriesSubtask( - self, regionGroup, regionName, fullSuffix, - masksSubtask, localObsDict) + self, regionGroup, regionName, fullSuffix, localObsDict) obsSubtasks[obsName] = obsSubtask plotRegionSubtask = PlotRegionTimeSeriesSubtask( self, regionGroup, regionName, index, controlConfig, - sectionName, fullSuffix, obsSubtasks) + sectionName, fullSuffix, obsSubtasks, + masksSubtask.geojsonFileName) plotRegionSubtask.run_after(combineSubtask) self.add_subtask(plotRegionSubtask) @@ -430,7 +421,7 @@ def __init__(self, parentTask, startYear, endYear, masksSubtask, Parameters ---------- - parentTask : ``TimeSeriesOceanRegions`` + parentTask : TimeSeriesOceanRegions The main task of which this is a subtask startYear, endYear : int @@ -664,7 +655,7 @@ def __init__(self, parentTask, 
startYears, endYears, regionGroup): # {{{ Parameters ---------- - parentTask : ``TimeSeriesOceanRegions`` + parentTask : TimeSeriesOceanRegions The main task of which this is a subtask startYears, endYears : list of int @@ -749,7 +740,7 @@ class ComputeObsRegionalTimeSeriesSubtask(AnalysisTask): # Xylar Asay-Davis def __init__(self, parentTask, regionGroup, regionName, fullSuffix, - masksSubtask, obsDict): + obsDict): # {{{ """ Construct the analysis task. @@ -770,10 +761,6 @@ def __init__(self, parentTask, regionGroup, regionName, fullSuffix, The regionGroup and regionName combined and modified to be appropriate as a task or file suffix - masksSubtask : ``ComputeRegionMasksSubtask`` - A task for creating mask files for each region to plot, used - to get the mask file name - obsDict : dict Information on the observations to compare against """ @@ -791,7 +778,6 @@ def __init__(self, parentTask, regionGroup, regionName, fullSuffix, self.regionGroup = regionGroup self.regionName = regionName - self.masksSubtask = masksSubtask self.obsDict = obsDict self.prefix = fullSuffix[0].lower() + fullSuffix[1:] @@ -848,6 +834,9 @@ def run_task(self): # {{{ regionMaskFileName = obsDict['maskTask'].maskFileName + print(regionMaskFileName) + print(xarray.open_dataset(regionMaskFileName)) + dsRegionMask = \ xarray.open_dataset(regionMaskFileName).stack( nCells=(obsDict['latVar'], obsDict['lonVar'])) @@ -995,14 +984,15 @@ class PlotRegionTimeSeriesSubtask(AnalysisTask): # Xylar Asay-Davis def __init__(self, parentTask, regionGroup, regionName, regionIndex, - controlConfig, sectionName, fullSuffix, obsSubtasks): + controlConfig, sectionName, fullSuffix, obsSubtasks, + geojsonFileName): # {{{ """ Construct the analysis task. 
Parameters ---------- - parentTask : ``AnalysisTask`` + parentTask : TimeSeriesOceanRegions The parent task, used to get the ``taskName``, ``config`` and ``componentName`` @@ -1027,6 +1017,9 @@ def __init__(self, parentTask, regionGroup, regionName, regionIndex, obsSubtasks : dict of ``AnalysisTasks`` Subtasks for computing the mean observed T and S in the region + + geojsonFileName : str + The geojson file including the feature to plot """ # Authors # ------- @@ -1047,6 +1040,7 @@ def __init__(self, parentTask, regionGroup, regionName, regionIndex, self.controlConfig = controlConfig self.prefix = fullSuffix[0].lower() + fullSuffix[1:] self.obsSubtasks = obsSubtasks + self.geojsonFileName = geojsonFileName for obsName in obsSubtasks: self.run_after(obsSubtasks[obsName]) @@ -1097,13 +1091,7 @@ def run_task(self): # {{{ config = self.config calendar = self.calendar - regionMaskSuffix = config.getExpression(self.sectionName, - 'regionMaskSuffix') - - regionMaskFile = get_region_mask(config, - '{}.geojson'.format(regionMaskSuffix)) - - fcAll = read_feature_collection(regionMaskFile) + fcAll = read_feature_collection(self.geojsonFileName) fc = FeatureCollection() for feature in fcAll.features: From a3179711cdf91a9e03211374a96e0479394d312b Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 22:04:39 +0100 Subject: [PATCH 46/58] Switch transport time series to aggregation --- mpas_analysis/ocean/time_series_transport.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/mpas_analysis/ocean/time_series_transport.py b/mpas_analysis/ocean/time_series_transport.py index e9614e710..5c042cf0a 100644 --- a/mpas_analysis/ocean/time_series_transport.py +++ b/mpas_analysis/ocean/time_series_transport.py @@ -28,14 +28,12 @@ from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf from mpas_analysis.shared.io.utility import build_config_full_path, \ - get_files_year_month, decode_strings, get_region_mask + 
get_files_year_month, decode_strings from mpas_analysis.shared.html import write_image_xml from mpas_analysis.shared.transects import ComputeTransectMasksSubtask -from mpas_analysis.shared.regions import get_feature_list - class TimeSeriesTransport(AnalysisTask): # {{{ """ @@ -81,17 +79,14 @@ def __init__(self, config, controlConfig=None): years = [year for year in range(startYear, endYear + 1)] - transportTransectFileName = \ - get_region_mask(config, 'transportTransects20200621.geojson') - transectsToPlot = config.getExpression('timeSeriesTransport', 'transectsToPlot') - if 'all' in transectsToPlot: - transectsToPlot = get_feature_list(transportTransectFileName) masksSubtask = ComputeTransectMasksSubtask( - self, transportTransectFileName, - outFileSuffix='transportTransects20200621') + parentTask=self, transectGroup='Transport Transects') + + transectsToPlot = masksSubtask.expand_transect_names(transectsToPlot) + transportTransectFileName = masksSubtask.geojsonFileName self.add_subtask(masksSubtask) From f7722acbc926d5c72f1ae4c7d1583e90f7c3d51c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 22:06:53 +0100 Subject: [PATCH 47/58] Switch Antarctic melt maps to aggregation --- .../ocean/climatology_map_antarctic_melt.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/mpas_analysis/ocean/climatology_map_antarctic_melt.py b/mpas_analysis/ocean/climatology_map_antarctic_melt.py index 0f2df3822..4c8493291 100644 --- a/mpas_analysis/ocean/climatology_map_antarctic_melt.py +++ b/mpas_analysis/ocean/climatology_map_antarctic_melt.py @@ -20,8 +20,8 @@ from mpas_analysis.shared import AnalysisTask -from mpas_analysis.shared.io.utility import build_obs_path, get_region_mask, \ - decode_strings, build_config_full_path +from mpas_analysis.shared.io.utility import build_obs_path, decode_strings, \ + build_config_full_path from mpas_analysis.shared.io import write_netcdf from mpas_analysis.shared.climatology import 
RemapMpasClimatologySubtask, \ @@ -34,8 +34,6 @@ from mpas_analysis.shared.constants import constants -from mpas_analysis.shared.regions import get_feature_list - class ClimatologyMapAntarcticMelt(AnalysisTask): # {{{ """ @@ -376,11 +374,9 @@ def __init__(self, parentTask, mpasClimatologyTask, controlConfig, self.mpasClimatologyTask = mpasClimatologyTask self.controlConfig = controlConfig - self.iceShelfMasksFile = get_region_mask(config, - 'iceShelves20200621.geojson') - self.masksSubtask = regionMasksTask.add_mask_subtask( - self.iceShelfMasksFile, outFileSuffix='iceShelves20200621') + regionGroup='Ice Shelves') + self.iceShelfMasksFile = self.masksSubtask.geojsonFileName self.run_after(self.masksSubtask) self.run_after(mpasClimatologyTask) @@ -400,8 +396,8 @@ def run_task(self): # {{{ sectionName = self.taskName iceShelvesInTable = config.getExpression(sectionName, 'iceShelvesInTable') - if 'all' in iceShelvesInTable: - iceShelvesInTable = get_feature_list(self.iceShelfMasksFile) + iceShelvesInTable = self.masksSubtask.expand_region_names( + iceShelvesInTable) meltRateFileName = get_masked_mpas_climatology_file_name( config, self.season, self.componentName, From afe6f763a0d65d9430e92483e74e2a664a72e141 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 22:16:41 +0100 Subject: [PATCH 48/58] Switch ocean regional profiles to aggregation --- .../ocean/ocean_regional_profiles.py | 25 ++++++------------- 1 file changed, 7 insertions(+), 18 deletions(-) diff --git a/mpas_analysis/ocean/ocean_regional_profiles.py b/mpas_analysis/ocean/ocean_regional_profiles.py index 87083b9d1..094928991 100644 --- a/mpas_analysis/ocean/ocean_regional_profiles.py +++ b/mpas_analysis/ocean/ocean_regional_profiles.py @@ -26,7 +26,6 @@ get_files_year_month, make_directories, decode_strings, get_region_mask from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf from mpas_analysis.shared.timekeeping.utility import days_to_datetime -from 
mpas_analysis.shared.regions import get_feature_list from mpas_analysis.shared.climatology import compute_climatology from mpas_analysis.shared.constants import constants from mpas_analysis.ocean.plot_hovmoller_subtask import PlotHovmollerSubtask @@ -79,8 +78,7 @@ def __init__(self, config, regionMasksTask, controlConfig=None): # {{{ self.seasons = config.getExpression('oceanRegionalProfiles', 'seasons') - self.regionMaskSuffix = config.get('oceanRegionalProfiles', - 'regionMaskSuffix') + regionGroup = config.get('oceanRegionalProfiles', 'regionGroup') self.regionNames = config.getExpression('oceanRegionalProfiles', 'regionNames') @@ -88,20 +86,14 @@ def __init__(self, config, regionMasksTask, controlConfig=None): # {{{ plotHovmoller = config.getboolean('oceanRegionalProfiles', 'plotHovmoller') - self.regionMaskSuffix = config.get('oceanRegionalProfiles', - 'regionMaskSuffix') - hovmollerGalleryGroup = config.get('oceanRegionalProfiles', 'hovmollerGalleryGroup') - masksFile = get_region_mask(config, - '{}.geojson'.format(self.regionMaskSuffix)) - - masksSubtask = regionMasksTask.add_mask_subtask( - masksFile, outFileSuffix=self.regionMaskSuffix) + masksSubtask = regionMasksTask.add_mask_subtask(regionGroup) + masksFile = masksSubtask.geojsonFileName + self.regionMaskSuffix = masksSubtask.outFileSuffix - if 'all' in self.regionNames: - self.regionNames = get_feature_list(masksFile) + self.regionNames = masksSubtask.expand_region_names(self.regionNames) self.masksSubtask = masksSubtask @@ -566,7 +558,7 @@ def __init__(self, parentTask, season, regionName, field, controlConfig): Parameters ---------- - parentTask : ``AnalysisTask`` + parentTask : OceanRegionalProfiles The parent task of which this is a subtask season : str @@ -643,10 +635,7 @@ def run_task(self): # {{{ startYear = self.parentTask.startYear endYear = self.parentTask.endYear - regionMaskSuffix = config.get('oceanRegionalProfiles', - 'regionMaskSuffix') - regionMaskFile = get_region_mask(config, - 
'{}.geojson'.format(regionMaskSuffix)) + regionMaskFile = self.parentTask.masksSubtask.geojsonFileName fcAll = read_feature_collection(regionMaskFile) From 44a71b18efcf5743686f4c8c32901324c36c762e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 23:25:50 +0100 Subject: [PATCH 49/58] Switch regional T-S diagrams to aggregration --- mpas_analysis/ocean/regional_ts_diagrams.py | 26 ++++++--------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/mpas_analysis/ocean/regional_ts_diagrams.py b/mpas_analysis/ocean/regional_ts_diagrams.py index 3406097d2..a612d1925 100644 --- a/mpas_analysis/ocean/regional_ts_diagrams.py +++ b/mpas_analysis/ocean/regional_ts_diagrams.py @@ -32,13 +32,11 @@ from mpas_analysis.shared.io import write_netcdf -from mpas_analysis.shared.io.utility import decode_strings, get_region_mask, \ +from mpas_analysis.shared.io.utility import decode_strings, \ build_obs_path, build_config_full_path, make_directories from mpas_analysis.shared.html import write_image_xml -from mpas_analysis.shared.regions import get_feature_list - from mpas_analysis.ocean.utility import compute_zmid from mpas_analysis.shared.constants import constants @@ -166,19 +164,12 @@ def __init__(self, config, mpasClimatologyTask, regionMasksTask, regionGroup[1:].replace(' ', '') sectionName = 'TSDiagramsFor{}'.format(sectionSuffix) - regionMaskSuffix = config.getExpression(sectionName, - 'regionMaskSuffix') - - regionMaskFile = get_region_mask( - config, '{}.geojson'.format(regionMaskSuffix)) - regionNames = config.getExpression(sectionName, 'regionNames') - if 'all' in regionNames and os.path.exists(regionMaskFile): - regionNames = get_feature_list(regionMaskFile) - mpasMasksSubtask = regionMasksTask.add_mask_subtask( - regionMaskFile, outFileSuffix=regionMaskSuffix) + regionGroup=regionGroup) + + regionNames = mpasMasksSubtask.expand_region_names(regionNames) obsList = config.getExpression(sectionName, 'obs') groupObsDicts = {} @@ -189,8 
+180,8 @@ def __init__(self, config, mpasClimatologyTask, regionMasksTask, config, component=self.componentName, relativePath=localObsDict['gridFileName']) obsMasksSubtask = regionMasksTask.add_mask_subtask( - regionMaskFile, outFileSuffix=regionMaskSuffix, - obsFileName=obsFileName, lonVar=localObsDict['lonVar'], + regionGroup, obsFileName=obsFileName, + lonVar=localObsDict['lonVar'], latVar=localObsDict['latVar'], meshName=localObsDict['gridName']) @@ -938,10 +929,7 @@ def run_task(self): # {{{ startYear = self.mpasClimatologyTask.startYear endYear = self.mpasClimatologyTask.endYear - regionMaskSuffix = config.getExpression(sectionName, 'regionMaskSuffix') - - regionMaskFile = get_region_mask(config, - '{}.geojson'.format(regionMaskSuffix)) + regionMaskFile = self.mpasMasksSubtask.geojsonFileName fcAll = read_feature_collection(regionMaskFile) From 21e0fa405b6cef72b7327c1fb95b951f385241db Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 23:26:32 +0100 Subject: [PATCH 50/58] Update the config files following aggregation changes --- configs/polarRegions.conf | 27 ++++++++++++++++++++++---- mpas_analysis/config.default | 37 ++++++++++-------------------------- 2 files changed, 33 insertions(+), 31 deletions(-) diff --git a/configs/polarRegions.conf b/configs/polarRegions.conf index dcbde8166..ef2dc675c 100644 --- a/configs/polarRegions.conf +++ b/configs/polarRegions.conf @@ -53,6 +53,11 @@ regionGroups = ['Antarctic Regions', 'Ocean Basins'] [TSDiagramsForAntarcticRegions] ## options related to plotting T/S diagrams of Antarctic regions +# list of regions to plot or ['all'] for all regions in the masks file. +# See "regionNames" in the antarcticRegions masks file in +# regionMaskSubdirectory for details. +regionNames = ['all'] + # The minimum and maximum depth over which fields are plotted, default is # to take these values from the geojson feature's zmin and zmax properties. 
zmin = -1000 @@ -188,10 +193,8 @@ seasons = ['ANN'] # minimum and maximum depth of profile plots, or empty for the full depth range depthRange = [-600., 0.] -# The suffix on the regional mask file to be used to determine the regions to -# plot. A region mask file should be in the regionMaskDirectory and should -# be named _.nc -regionMaskSuffix = antarcticRegions20200621 +# The name of a region group defining the region for each profile +regionGroup = Antarctic Regions # a list of region names from the region masks file to plot regionNames = ["Southern Ocean 60S", "Weddell Sea Shelf", @@ -269,3 +272,19 @@ colorbarTicksResult = [-20., -10., -5., -2., -1., 0., 1., 2., 5., 10., 20.] normArgsDifference = {'linthresh': 1., 'linscale': 0.5, 'vmin': -20., 'vmax': 20.} colorbarTicksDifference = [-20., -10., -5., -2., -1., 0., 1., 2., 5., 10., 20.] + +# make a tables of mean melt rates and melt fluxes for individual ice shelves? +makeTables = True + +# If making tables, which ice shelves? This is a list of ice shelves or +# ['all'] for all 106 ice shelves and regions. +iceShelvesInTable = ['all'] + + +[timeSeriesAntarcticRegions] +## options related to plotting time series of Antarctic regions + +# list of regions to plot or ['all'] for all regions in the masks file. +# See "regionNames" in the antarcticRegions masks file in +# regionMaskSubdirectory for details. +regionNames = ['all'] diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 8c6a1f1dd..951bfd2a1 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -1217,11 +1217,11 @@ colorbarTicksDifference = [-100., -50., -20., -10., -5., -2., -1., 0., 1., 2., 5., 10., 20., 50., 100.] # make a tables of mean melt rates and melt fluxes for individual ice shelves? -makeTables = True +makeTables = False # If making tables, which ice shelves? This is a list of ice shelves or # ['all'] for all 106 ice shelves and regions. 
-iceShelvesInTable = ['all'] +iceShelvesInTable = [] [timeSeriesAntarcticMelt] @@ -1260,16 +1260,10 @@ regionGroups = ['Antarctic Regions'] [timeSeriesAntarcticRegions] ## options related to plotting time series of Antarctic regions -# An identifying string that is the prefix for a geojson file containing -# Antarctic ocean regions. Each region must have 'zmin' and 'zmax' properties -# in addition to the usual properties for a region in geometric_features. The -# string is also used as the suffix for mask files generated from the geojson -regionMaskSuffix = 'antarcticRegions20200621' - # list of regions to plot or ['all'] for all regions in the masks file. # See "regionNames" in the antarcticRegions masks file in # regionMaskSubdirectory for details. -regionNames = ['all'] +regionNames = [] # a list of variables to plot variables = [{'name': 'temperature', @@ -1347,16 +1341,10 @@ subprocessCount = 4 [TSDiagramsForAntarcticRegions] ## options related to plotting T/S diagrams of Antarctic regions -# An identifying string that is the prefix for a geojson file containing -# Antarctic ocean regions. Each region must have 'zmin' and 'zmax' properties -# in addition to the usual properties for a region in geometric_features. The -# string is also used as the suffix for mask files generated from the geojson -regionMaskSuffix = 'antarcticRegions20200621' - # list of regions to plot or ['all'] for all regions in the masks file. # See "regionNames" in the antarcticRegions masks file in # regionMaskSubdirectory for details. -regionNames = ['all'] +regionNames = [] # diagram type, either 'volumetric' or 'scatter', depending on if the points # should be binned the plot should show the volume fraction in each bin or @@ -1396,16 +1384,13 @@ obs = ['SOSE', 'WOA18'] [TSDiagramsForOceanBasins] ## options related to plotting T/S diagrams of major ocean basins -# An identifying string that is the prefix for a geojson file containing -# ocean basins. 
Each region must have 'zmin' and 'zmax' properties in addition -# to the usual properties for a region in geometric_features. The string is -# also used as the suffix for mask files generated from the geojson file -regionMaskSuffix = 'oceanBasins20200621' - # list of regions to plot or ['all'] for all regions in the masks file. # See "regionNames" in the oceanBasins masks file in # regionMaskSubdirectory for details. -regionNames = ['all'] +regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", + "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin", + "Global Ocean", "Global Ocean 65N to 65S", + "Global Ocean 15S to 15N"] # diagram type, either 'volumetric' or 'scatter', depending on if the points # should be binned the plot should show the volume fraction in each bin or @@ -2969,10 +2954,8 @@ seasons = ['JFM', 'JAS', 'ANN'] # minimum and maximum depth of profile plots, or empty for the full depth range depthRange = [] -# The suffix on the regional mask file to be used to determine the regions to -# plot. 
A region mask file should be in the regionMaskDirectory and should -# be named _.nc -regionMaskSuffix = oceanBasins20200621 +# The name of a region group defining the region for each profile +regionGroup = Ocean Basins # a list of region names from the region masks file to plot regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", From 0c72a99773b95092ad7166b70e5b3e08ff46250a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 23 Nov 2020 23:27:20 +0100 Subject: [PATCH 51/58] Update geometric_features and mpas_tools versions --- README.md | 8 ++++---- ci/recipe/meta.yaml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index a6065f6a7..cb6a651b0 100644 --- a/README.md +++ b/README.md @@ -62,10 +62,10 @@ environment with the following packages: * shapely * cartopy >= 0.18.0 * cartopy\_offlinedata - * geometric\_features >= 0.1.9 + * geometric\_features >= 0.1.12 * gsw * pyremap < 0.1.0 - * mpas\_tools >= 0.0.13 + * mpas\_tools >= 0.0.15 These can be installed via the conda command: ``` @@ -74,8 +74,8 @@ conda config --set channel_priority strict conda create -n mpas-analysis python=3.8 numpy scipy "matplotlib>=3.0.2" \ netCDF4 "xarray>=0.14.1" dask bottleneck lxml "nco>=4.8.1" pyproj \ pillow cmocean progressbar2 requests setuptools shapely "cartopy>=0.18.0" \ - cartopy_offlinedata "geometric_features>=0.1.9" gsw "pyremap<0.1.0" \ - "mpas_tools>=0.0.13" + cartopy_offlinedata "geometric_features>=0.1.12" gsw "pyremap<0.1.0" \ + "mpas_tools>=0.0.15" conda activate mpas-analysis ``` diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml index 2a3271f68..f36ad7817 100644 --- a/ci/recipe/meta.yaml +++ b/ci/recipe/meta.yaml @@ -40,10 +40,10 @@ requirements: - shapely - cartopy >=0.18.0 - cartopy_offlinedata - - geometric_features >=0.1.9 + - geometric_features >=0.1.12 - gsw - pyremap <0.1.0 - - mpas_tools >=0.0.13 + - mpas_tools >=0.0.15 test: requires: From af1729f1eee2bbcac31d210f9c1b0c00f6c22dd0 Mon Sep 17 
00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 24 Nov 2020 04:51:37 -0600 Subject: [PATCH 52/58] Handle empty region/transect lists --- mpas_analysis/ocean/climatology_map_antarctic_melt.py | 3 +++ mpas_analysis/ocean/ocean_regional_profiles.py | 2 ++ mpas_analysis/ocean/regional_ts_diagrams.py | 2 ++ mpas_analysis/ocean/time_series_antarctic_melt.py | 6 +++++- mpas_analysis/ocean/time_series_ocean_regions.py | 3 +++ mpas_analysis/ocean/time_series_transport.py | 2 ++ 6 files changed, 17 insertions(+), 1 deletion(-) diff --git a/mpas_analysis/ocean/climatology_map_antarctic_melt.py b/mpas_analysis/ocean/climatology_map_antarctic_melt.py index 4c8493291..50bf58f75 100644 --- a/mpas_analysis/ocean/climatology_map_antarctic_melt.py +++ b/mpas_analysis/ocean/climatology_map_antarctic_melt.py @@ -396,6 +396,9 @@ def run_task(self): # {{{ sectionName = self.taskName iceShelvesInTable = config.getExpression(sectionName, 'iceShelvesInTable') + if len(iceShelvesInTable) == 0: + return + iceShelvesInTable = self.masksSubtask.expand_region_names( iceShelvesInTable) diff --git a/mpas_analysis/ocean/ocean_regional_profiles.py b/mpas_analysis/ocean/ocean_regional_profiles.py index 094928991..d921d9518 100644 --- a/mpas_analysis/ocean/ocean_regional_profiles.py +++ b/mpas_analysis/ocean/ocean_regional_profiles.py @@ -82,6 +82,8 @@ def __init__(self, config, regionMasksTask, controlConfig=None): # {{{ self.regionNames = config.getExpression('oceanRegionalProfiles', 'regionNames') + if len(self.regionNames) == 0: + return plotHovmoller = config.getboolean('oceanRegionalProfiles', 'plotHovmoller') diff --git a/mpas_analysis/ocean/regional_ts_diagrams.py b/mpas_analysis/ocean/regional_ts_diagrams.py index a612d1925..376dbd2f5 100644 --- a/mpas_analysis/ocean/regional_ts_diagrams.py +++ b/mpas_analysis/ocean/regional_ts_diagrams.py @@ -165,6 +165,8 @@ def __init__(self, config, mpasClimatologyTask, regionMasksTask, sectionName = 'TSDiagramsFor{}'.format(sectionSuffix) regionNames = 
config.getExpression(sectionName, 'regionNames') + if len(regionNames) == 0: + continue mpasMasksSubtask = regionMasksTask.add_mask_subtask( regionGroup=regionGroup) diff --git a/mpas_analysis/ocean/time_series_antarctic_melt.py b/mpas_analysis/ocean/time_series_antarctic_melt.py index a745df54f..122ca1cce 100644 --- a/mpas_analysis/ocean/time_series_antarctic_melt.py +++ b/mpas_analysis/ocean/time_series_antarctic_melt.py @@ -75,9 +75,13 @@ def __init__(self, config, mpasTimeSeriesTask, regionMasksTask, tags=['timeSeries', 'melt', 'landIceCavities', 'antarctic']) regionGroup = 'Ice Shelves' - masksSubtask = regionMasksTask.add_mask_subtask(regionGroup=regionGroup) iceShelvesToPlot = config.getExpression('timeSeriesAntarcticMelt', 'iceShelvesToPlot') + if len(iceShelvesToPlot) == 0: + # nothing else to do + return + + masksSubtask = regionMasksTask.add_mask_subtask(regionGroup=regionGroup) self.iceShelfMasksFile = masksSubtask.geojsonFileName iceShelvesToPlot = masksSubtask.expand_region_names(iceShelvesToPlot) diff --git a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index a1f859389..a55378ab9 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -124,6 +124,9 @@ def __init__(self, config, regionMasksTask, controlConfig=None): sectionName = 'timeSeries{}'.format(sectionSuffix) regionNames = config.getExpression(sectionName, 'regionNames') + if len(regionNames) == 0: + # no regions in this group were requested + continue masksSubtask = regionMasksTask.add_mask_subtask( regionGroup=regionGroup) diff --git a/mpas_analysis/ocean/time_series_transport.py b/mpas_analysis/ocean/time_series_transport.py index 5c042cf0a..3257334b9 100644 --- a/mpas_analysis/ocean/time_series_transport.py +++ b/mpas_analysis/ocean/time_series_transport.py @@ -81,6 +81,8 @@ def __init__(self, config, controlConfig=None): transectsToPlot = 
config.getExpression('timeSeriesTransport', 'transectsToPlot') + if len(transectsToPlot) == 0: + return masksSubtask = ComputeTransectMasksSubtask( parentTask=self, transectGroup='Transport Transects') From a07c7ac9e171c1ce4b22599d01871560f2e3704e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 24 Nov 2020 05:46:30 -0600 Subject: [PATCH 53/58] Fix z bounds in ocean region time series --- .../ocean/time_series_ocean_regions.py | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index a55378ab9..6885ea4ce 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -343,8 +343,8 @@ def run_task(self): # {{{ for regionIndex in range(nRegions): self.logger.info(' region: {}'.format( self.regionNames[regionIndex])) - dsRregion = dsRegionMask.isel(nRegions=regionIndex) - cellMask = dsRregion.regionCellMasks == 1 + dsRegion = dsRegionMask.isel(nRegions=regionIndex) + cellMask = dsRegion.regionCellMasks == 1 if openOceanMask is not None: cellMask = numpy.logical_and(cellMask, openOceanMask) @@ -354,29 +354,28 @@ def run_task(self): # {{{ 1e-12 * totalArea.values)) if config_zmin is None: - if 'zminRegions' in dsRregion: - zmin = dsRregion.zminRegions + if 'zminRegions' in dsRegion: + zmin = dsRegion.zminRegions.values else: # the old naming convention, used in some pre-generated # mask files - zmin = dsRregion.zmin + zmin = dsRegion.zmin.values else: - zmin = (('nRegions',), config_zmin) + zmin = config_zmin if config_zmax is None: - if 'zmaxRegions' in dsRregion: - zmax = dsRregion.zmaxRegions + if 'zmaxRegions' in dsRegion: + zmax = dsRegion.zmaxRegions.values else: # the old naming convention, used in some pre-generated # mask files - zmax = dsRregion.zmax + zmax = dsRegion.zmax.values else: - zmax = (('nRegions',), config_zmax) - + zmax = config_zmax depthMask = 
numpy.logical_and(zMid >= zmin, zMid <= zmax) dsOut = xarray.Dataset() - dsOut['zmin'] = zmin - dsOut['zmax'] = zmax + dsOut['zmin'] = ('nRegions', [zmin]) + dsOut['zmax'] = ('nRegions', [zmax]) dsOut['totalArea'] = totalArea dsOut['cellMask'] = cellMask dsOut['depthMask'] = depthMask From e310734e9c3a945d38c26f100a50fe93496e263b Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 27 Nov 2020 11:38:14 +0100 Subject: [PATCH 54/58] Remove ReadTheDocs --- readthedocs.yml | 31 ------------------------------- 1 file changed, 31 deletions(-) delete mode 100644 readthedocs.yml diff --git a/readthedocs.yml b/readthedocs.yml deleted file mode 100644 index 5e664e40f..000000000 --- a/readthedocs.yml +++ /dev/null @@ -1,31 +0,0 @@ -# .readthedocs.yml -# Read the Docs configuration file -# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details - -# Required -version: 2 - -# Build documentation in the docs/ directory with Sphinx -sphinx: - configuration: docs/conf.py - -# Optionally build your docs in additional formats such as PDF and ePub -# Build PDF -formats: - - pdf - -# Optionally set the version of Python and requirements required to build your docs -python: - version: 3.7 - install: - - method: setuptools - path: . 
- system_packages: true - -conda: - environment: docs/environment.yml - -build: - image: latest - - From a66e0550c76be295e2f68cd2c3d3a60b869ca810 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 27 Nov 2020 11:40:14 +0100 Subject: [PATCH 55/58] Update to v1.2.9 --- ci/recipe/meta.yaml | 2 +- docs/versions.rst | 3 +++ mpas_analysis/__init__.py | 2 +- setup.py | 2 +- 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml index f36ad7817..573e90c55 100644 --- a/ci/recipe/meta.yaml +++ b/ci/recipe/meta.yaml @@ -1,5 +1,5 @@ {% set name = "MPAS-Analysis" %} -{% set version = "1.2.8" %} +{% set version = "1.2.9" %} package: name: {{ name|lower }} diff --git a/docs/versions.rst b/docs/versions.rst index 23448dbc8..de382a959 100644 --- a/docs/versions.rst +++ b/docs/versions.rst @@ -9,6 +9,7 @@ Documentation On GitHub `v1.2.6`_ `1.2.6`_ `v1.2.7`_ `1.2.7`_ `v1.2.8`_ `1.2.8`_ +`v1.2.9`_ `1.2.9`_ ================ =============== .. _`stable`: ../stable/index.html @@ -16,8 +17,10 @@ Documentation On GitHub .. _`v1.2.6`: ../1.2.6/index.html .. _`v1.2.7`: ../1.2.7/index.html .. _`v1.2.8`: ../1.2.8/index.html +.. _`v1.2.9`: ../1.2.9/index.html .. _`master`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/master .. _`develop`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/develop .. _`1.2.6`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.6 .. _`1.2.7`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.7 .. _`1.2.8`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.8 +.. 
_`1.2.9`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.9 diff --git a/mpas_analysis/__init__.py b/mpas_analysis/__init__.py index 92f4d2f13..a1ca3e687 100644 --- a/mpas_analysis/__init__.py +++ b/mpas_analysis/__init__.py @@ -3,5 +3,5 @@ import matplotlib as mpl mpl.use('Agg') -__version_info__ = (1, 2, 8) +__version_info__ = (1, 2, 9) __version__ = '.'.join(str(vi) for vi in __version_info__) diff --git a/setup.py b/setup.py index 7e633190e..0e0f7b8c5 100755 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ isrelease = True -version = '1.2.8' +version = '1.2.9' if not isrelease: import subprocess From fcb70903b5b5b953f063e4c3c0657416f03a8fb6 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 27 Nov 2020 20:28:58 +0100 Subject: [PATCH 56/58] Break hovmollerOceanRegions into its own task Also, make the oceanRegionalProfiles task support multiple region groups. To support both of these changes, the oceanRegionalProfiles task now has a functionality for adding a new range of years and/or region group (creating all of the subtasks to compute the appropriate years and combine them into profiles and/or Hovmoller datasets). Some config sections have been renamed to support these changes, and configs/polarTegions.conf has been updated accordingly. 
--- configs/polarRegions.conf | 79 +++- mpas_analysis/__main__.py | 8 +- mpas_analysis/config.default | 68 +++- mpas_analysis/ocean/__init__.py | 3 + .../ocean/hovmoller_ocean_regions.py | 134 +++++++ .../ocean/ocean_regional_profiles.py | 356 +++++++++++------- 6 files changed, 475 insertions(+), 173 deletions(-) create mode 100644 mpas_analysis/ocean/hovmoller_ocean_regions.py diff --git a/configs/polarRegions.conf b/configs/polarRegions.conf index ef2dc675c..859f9db12 100644 --- a/configs/polarRegions.conf +++ b/configs/polarRegions.conf @@ -182,10 +182,36 @@ colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, colorbarLevelsDifference = [-0.5, -0.2, -0.1, -0.05, -0.02, 0, 0.02, 0.05, 0.1, 0.2, 0.5] + [oceanRegionalProfiles] ## options related to plotting vertical profiles of regional means (and ## variability) of 3D MPAS fields +regionGroups = ['Antarctic Regions'] + + +[profilesAntarcticRegions] +## options related to plotting vertical profiles Antarctic regions + + +# a list of dictionaries for each field to plot. The dictionary includes +# prefix (used for file names, task names and sections) as well as the mpas +# name of the field, units for colorbars and a the name as it should appear +# in figure titles and captions. +fields = + [{'prefix': 'potentialTemperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$\degree$C', + 'titleName': 'Potential Temperature'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$^{-3}$', + 'titleName': 'Potential Density'}] + # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, # Nov, Dec, JFM, AMJ, JAS, OND, ANN) seasons = ['ANN'] @@ -193,9 +219,6 @@ seasons = ['ANN'] # minimum and maximum depth of profile plots, or empty for the full depth range depthRange = [-600., 0.] 
-# The name of a region group defining the region for each profile -regionGroup = Antarctic Regions - # a list of region names from the region masks file to plot regionNames = ["Southern Ocean 60S", "Weddell Sea Shelf", "Weddell Sea Deep", "Bellingshausen Sea Shelf", @@ -205,17 +228,48 @@ regionNames = ["Southern Ocean 60S", "Weddell Sea Shelf", "Western Ross Sea Deep", "East Antarctic Seas Shelf", "East Antarctic Seas Deep"] -# Make Hovmoller plots of fields vs time and depth? -plotHovmoller = True - -# web gallery options -hovmollerGalleryGroup = Antarctic Regional Time Series vs Depths - # web gallery options profileGalleryGroup = Antarctic Regional Profiles +[hovmollerOceanRegions] +## options related to plotting Hovmoller diagrams (depth vs. time plots) of +## regional means of 3D MPAS fields + +# the names of region groups to plot, each with its own section below +regionGroups = ['Antarctic Regions'] + + +[hovmollerAntarcticRegions] +## options related to plotting Hovmoller diagrams of Antarctic Regions + +# a list of dictionaries for each field to plot. The dictionary includes +# prefix (used for file names, task names and sections) as well as the MPAS +# name of the field, units for colorbars and a the name as it should appear +# in figure titles and captions. 
+fields = + [{'prefix': 'potentialTemperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$\degree$C', + 'titleName': 'Potential Temperature'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$^{-3}$', + 'titleName': 'Potential Density'}] -[temperatureOceanRegionalHovmoller] +# a list of region names from the region masks file to plot +regionNames = ["Southern Ocean 60S", "Weddell Sea Shelf", + "Weddell Sea Deep", "Bellingshausen Sea Shelf", + "Bellingshausen Sea Deep", "Amundsen Sea Shelf", + "Amundsen Sea Deep", "Eastern Ross Sea Shelf", + "Eastern Ross Sea Deep", "Western Ross Sea Shelf", + "Western Ross Sea Deep", "East Antarctic Seas Shelf", + "East Antarctic Seas Deep"] + +[hovmollerOceanRegionsPotentialTemperature] ## options related to plotting time series of temperature vs. depth in ocean ## regions @@ -229,8 +283,7 @@ movingAverageMonths = 12 # limits on depth, the full range by default yLim = [-600., -5.] - -[salinityOceanRegionalHovmoller] +[hovmollerOceanRegionsSalinity] ## options related to plotting time series of salinity vs. depth in ocean ## regions @@ -245,7 +298,7 @@ movingAverageMonths = 12 yLim = [-600., -5.] -[potentialDensityOceanRegionalHovmoller] +[hovmollerOceanRegionsPotentialDensity] ## options related to plotting time series of potential density vs. 
depth in ## ocean regions diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index 709822aba..e27e4c254 100755 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -210,8 +210,12 @@ def build_analysis_list(config, controlConfig): # {{{ analyses.append(ocean.GeojsonTransects(config, oceanClimatolgyTasks['avg'], controlConfig)) - analyses.append(ocean.OceanRegionalProfiles(config, oceanRegionMasksTask, - controlConfig)) + oceanRegionalProfiles = ocean.OceanRegionalProfiles( + config, oceanRegionMasksTask, controlConfig) + analyses.append(oceanRegionalProfiles) + + analyses.append(ocean.HovmollerOceanRegions( + config, oceanRegionMasksTask, oceanRegionalProfiles, controlConfig)) # Sea Ice Analyses seaIceClimatolgyTask = MpasClimatologyTask(config=config, diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 951bfd2a1..9df61fc15 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -1289,7 +1289,7 @@ variables = [{'name': 'temperature', # zmin = -1000 # zmax = -400 -# Obserational data sets to compare against +# Observational data sets to compare against obs = ['SOSE', 'WOA18'] @@ -2929,12 +2929,19 @@ galleryLabel = Chlorophyll ## options related to plotting vertical profiles of regional means (and ## variability) of 3D MPAS fields +# The name of a region group defining the region for each profile +regionGroups = ['Ocean Basins'] + + +[profilesOceanBasins] +## options related to plotting vertical profiles ocean basins + # a list of dictionaries for each field to plot. The dictionary includes # prefix (used for file names, task names and sections) as well as the mpas # name of the field, units for colorbars and a the name as it should appear # in figure titles and captions. 
fields = - [{'prefix': 'temperature', + [{'prefix': 'potentialTemperature', 'mpas': 'timeMonthly_avg_activeTracers_temperature', 'units': r'$\degree$C', 'titleName': 'Potential Temperature'}, @@ -2954,33 +2961,60 @@ seasons = ['JFM', 'JAS', 'ANN'] # minimum and maximum depth of profile plots, or empty for the full depth range depthRange = [] -# The name of a region group defining the region for each profile -regionGroup = Ocean Basins - # a list of region names from the region masks file to plot regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin", "Global Ocean", "Global Ocean 65N to 65S", "Global Ocean 15S to 15N"] -# Make Hovmoller plots of fields vs time and depth? -plotHovmoller = False - -# web gallery options -hovmollerGalleryGroup = Ocean Basin Time Series vs Depths - # web gallery options profileGalleryGroup = Ocean Basin Profiles -[temperatureOceanRegionalHovmoller] +[hovmollerOceanRegions] +## options related to plotting Hovmoller diagrams (depth vs. time plots) of +## regional means of 3D MPAS fields + +# the names of region groups to plot, each with its own section below +regionGroups = ['Ocean Basins'] + + +[hovmollerOceanBasins] +## options related to plotting Hovmoller diagrams of ocean basins + +# a list of dictionaries for each field to plot. The dictionary includes +# prefix (used for file names, task names and sections) as well as the MPAS +# name of the field, units for colorbars and a the name as it should appear +# in figure titles and captions. 
+fields = + [{'prefix': 'potentialTemperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$\degree$C', + 'titleName': 'Potential Temperature'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$^{-3}$', + 'titleName': 'Potential Density'}] + +# a list of region names from the region masks file to plot +regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", + "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin", + "Global Ocean", "Global Ocean 65N to 65S", + "Global Ocean 15S to 15N"] + + +[hovmollerOceanRegionsPotentialTemperature] ## options related to plotting time series of temperature vs. depth in ocean ## regions # Number of points over which to compute moving average(e.g., for monthly # output, movingAverageMonths=12 corresponds to a 12-month moving average # window) -movingAverageMonths = 1 +movingAverageMonths = 12 # colormap colormapNameResult = RdYlBu_r @@ -3018,14 +3052,14 @@ contourLevels = 'none' # yLim = [-6000., 0.] -[salinityOceanRegionalHovmoller] +[hovmollerOceanRegionsSalinity] ## options related to plotting time series of salinity vs. depth in ocean ## regions # Number of points over which to compute moving average(e.g., for monthly # output, movingAverageMonths=12 corresponds to a 12-month moving average # window) -movingAverageMonths = 1 +movingAverageMonths = 12 # colormap colormapNameResult = haline @@ -3063,14 +3097,14 @@ contourLevels = 'none' # yLim = [-6000., 0.] -[potentialDensityOceanRegionalHovmoller] +[hovmollerOceanRegionsPotentialDensity] ## options related to plotting time series of potential density vs. 
depth in ## ocean regions # Number of points over which to compute moving average(e.g., for monthly # output, movingAverageMonths=12 corresponds to a 12-month moving average # window) -movingAverageMonths = 1 +movingAverageMonths = 12 # colormap colormapNameResult = Spectral_r diff --git a/mpas_analysis/ocean/__init__.py b/mpas_analysis/ocean/__init__.py index 86babe722..0ec961719 100644 --- a/mpas_analysis/ocean/__init__.py +++ b/mpas_analysis/ocean/__init__.py @@ -50,3 +50,6 @@ from mpas_analysis.ocean.ocean_regional_profiles import \ OceanRegionalProfiles + +from mpas_analysis.ocean.hovmoller_ocean_regions import \ + HovmollerOceanRegions diff --git a/mpas_analysis/ocean/hovmoller_ocean_regions.py b/mpas_analysis/ocean/hovmoller_ocean_regions.py new file mode 100644 index 000000000..5a6a4a489 --- /dev/null +++ b/mpas_analysis/ocean/hovmoller_ocean_regions.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +# This software is open source software available under the BSD-3 license. +# +# Copyright (c) 2020 Triad National Security, LLC. All rights reserved. +# Copyright (c) 2020 Lawrence Livermore National Security, LLC. All rights +# reserved. +# Copyright (c) 2020 UT-Battelle, LLC. All rights reserved. +# +# Additional copyright and license information can be found in the LICENSE file +# distributed with this code, or at +# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/master/LICENSE +# + +from mpas_analysis.shared import AnalysisTask +from mpas_analysis.ocean.plot_hovmoller_subtask import PlotHovmollerSubtask + + +class HovmollerOceanRegions(AnalysisTask): # {{{ + """ + Compute and plot a Hovmoller diagram (depth vs. time) for regionally + analyzed data. The mean of the data are computed over each region at each + depth and time. + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, config, regionMasksTask, oceanRegionalProfilesTask, + controlConfig=None): # {{{ + """ + Construct the analysis task. 
+ + Parameters + ---------- + config : instance of MpasAnalysisConfigParser + Contains configuration options + + regionMasksTask : ``ComputeRegionMasks`` + A task for computing region masks + + oceanRegionalProfilesTask : mpas_analysis.ocean.OceanRegionalProfiles + A task for computing ocean regional profiles + + controlConfig : ``MpasAnalysisConfigParser``, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(HovmollerOceanRegions, self).__init__( + config=config, + taskName='hovmollerOceanRegions', + componentName='ocean', + tags=['profiles', 'timeSeries', 'hovmoller']) + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.get('timeSeries', 'endYear') + if endYear == 'end': + # a valid end year wasn't found, so likely the run was not found, + # perhaps because we're just listing analysis tasks + endYear = startYear + else: + endYear = int(endYear) + + regionGroups = config.getExpression('hovmollerOceanRegions', + 'regionGroups') + + for regionGroup in regionGroups: + suffix = regionGroup[0].upper() + regionGroup[1:].replace(' ', '') + regionGroupSection = 'hovmoller{}'.format(suffix) + regionNames = config.getExpression(regionGroupSection, + 'regionNames') + if len(regionNames) == 0: + return + + fields = config.getExpression(regionGroupSection, 'fields') + + masksSubtask = regionMasksTask.add_mask_subtask(regionGroup) + masksFile = masksSubtask.geojsonFileName + timeSeriesName = masksSubtask.outFileSuffix + + regionNames = masksSubtask.expand_region_names(regionNames) + + self.masksSubtask = masksSubtask + + oceanRegionalProfilesTask.add_region_group( + regionMasksTask, regionGroup, regionNames, + fields, startYear, endYear) + + combineSubtask = oceanRegionalProfilesTask.combineSubtasks[ + regionGroup][(startYear, endYear)] + + for field in fields: + prefix = field['prefix'] + suffix = prefix[0].upper() + 
prefix[1:] + for regionName in regionNames: + subtaskName = 'plotHovmoller_{}_{}'.format( + prefix, regionName.replace(' ', '_')) + inFileName = \ + '{}/regionalProfiles_{}_{:04d}-{:04d}.nc'.format( + timeSeriesName, timeSeriesName, + startYear, endYear) + titleName = field['titleName'] + caption = 'Time series of {} {} vs ' \ + 'depth'.format(regionName.replace('_', ' '), + titleName) + hovmollerSubtask = PlotHovmollerSubtask( + parentTask=self, + regionName=regionName, + inFileName=inFileName, + outFileLabel='{}_hovmoller'.format(prefix), + fieldNameInTitle=titleName, + mpasFieldName='{}_mean'.format(prefix), + unitsLabel=field['units'], + sectionName='hovmollerOceanRegions{}'.format(suffix), + thumbnailSuffix='', + imageCaption=caption, + galleryGroup='{} Time Series vs Depths'.format( + regionGroup), + groupSubtitle=None, + groupLink='ocnreghovs', + galleryName=titleName, + subtaskName=subtaskName, + controlConfig=controlConfig, + regionMaskFile=masksFile) + hovmollerSubtask.run_after(combineSubtask) + self.add_subtask(hovmollerSubtask) + + # }}} + # }}} + +# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/ocean/ocean_regional_profiles.py b/mpas_analysis/ocean/ocean_regional_profiles.py index d921d9518..05ef80af0 100644 --- a/mpas_analysis/ocean/ocean_regional_profiles.py +++ b/mpas_analysis/ocean/ocean_regional_profiles.py @@ -11,9 +11,6 @@ # https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/master/LICENSE # -from __future__ import absolute_import, division, print_function, \ - unicode_literals - import xarray as xr import numpy as np import os @@ -23,30 +20,29 @@ from mpas_analysis.shared import AnalysisTask from mpas_analysis.shared.io.utility import build_config_full_path, \ - get_files_year_month, make_directories, decode_strings, get_region_mask + get_files_year_month, make_directories, decode_strings from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf from mpas_analysis.shared.timekeeping.utility 
import days_to_datetime from mpas_analysis.shared.climatology import compute_climatology from mpas_analysis.shared.constants import constants -from mpas_analysis.ocean.plot_hovmoller_subtask import PlotHovmollerSubtask from mpas_analysis.shared.html import write_image_xml from mpas_analysis.shared.plot import savefig, add_inset class OceanRegionalProfiles(AnalysisTask): # {{{ - ''' + """ Compute and plot vertical profiles of regionally analyzed data. The mean and standard deviation of the data are computed over each region. - The mean isdisplayed as a Hovmoller plot. The mean and std. dev. are - further computed in time (within the requested seasons) and this result - is plotted as a vertical profile with shading showing variability. - ''' + The mean and std. dev. are computed in time (within the requested seasons) + and this result is plotted as a vertical profile with shading showing + variability. + """ # Authors # ------- # Xylar Asay-Davis def __init__(self, config, regionMasksTask, controlConfig=None): # {{{ - ''' + """ Construct the analysis task. 
Parameters @@ -59,7 +55,7 @@ def __init__(self, config, regionMasksTask, controlConfig=None): # {{{ controlConfig : ``MpasAnalysisConfigParser``, optional Configuration options for a control run (if any) - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -71,99 +67,112 @@ def __init__(self, config, regionMasksTask, controlConfig=None): # {{{ componentName='ocean', tags=['profiles', 'climatology']) - self.startYear = config.getint('climatology', 'startYear') - self.endYear = config.getint('climatology', 'endYear') + self.combineSubtasks = dict() + self.computeSubtasks = dict() + self.masksSubtasks = dict() - self.fields = config.getExpression('oceanRegionalProfiles', 'fields') + startYear = config.getint('climatology', 'startYear') + endYear = config.getint('climatology', 'endYear') - self.seasons = config.getExpression('oceanRegionalProfiles', 'seasons') + regionGroups = config.getExpression('oceanRegionalProfiles', + 'regionGroups') - regionGroup = config.get('oceanRegionalProfiles', 'regionGroup') + for regionGroup in regionGroups: + regionGroupSection = 'profiles{}'.format( + regionGroup.replace(' ', '')) - self.regionNames = config.getExpression('oceanRegionalProfiles', - 'regionNames') - if len(self.regionNames) == 0: - return + fields = config.getExpression(regionGroupSection, 'fields') - plotHovmoller = config.getboolean('oceanRegionalProfiles', - 'plotHovmoller') + seasons = config.getExpression(regionGroupSection, 'seasons') - hovmollerGalleryGroup = config.get('oceanRegionalProfiles', - 'hovmollerGalleryGroup') + regionNames = config.getExpression(regionGroupSection, + 'regionNames') + if len(regionNames) == 0: + return - masksSubtask = regionMasksTask.add_mask_subtask(regionGroup) - masksFile = masksSubtask.geojsonFileName - self.regionMaskSuffix = masksSubtask.outFileSuffix + self.add_region_group(regionMasksTask, regionGroup, regionNames, + fields, startYear, endYear, seasons) - self.regionNames = masksSubtask.expand_region_names(self.regionNames) + 
combineSubtask = \ + self.combineSubtasks[regionGroup][(startYear, endYear)] - self.masksSubtask = masksSubtask + masksSubtask = self.masksSubtasks[regionGroup] - years = range(self.startYear, self.endYear + 1) + timeSeriesName = masksSubtask.outFileSuffix - # in the end, we'll combine all the time series into one, but we create - # this task first so it's easier to tell it to run after all the - # compute tasks - combineSubtask = CombineRegionalProfileTimeSeriesSubtask( - self, startYears=years, endYears=years) + for field in fields: + for regionName in regionNames: + for season in seasons: + plotSubtask = PlotRegionalProfileTimeSeriesSubtask( + self, masksSubtask, season, regionName, field, + timeSeriesName, startYear, endYear, controlConfig) + plotSubtask.run_after(combineSubtask) + self.add_subtask(plotSubtask) + + # }}} + + def add_region_group(self, regionMasksTask, regionGroup, regionNames, + fields, startYear, endYear, seasons=None): + """ + Add years to the profiles to compute + + Parameters + ---------- + startYear : int + The start year of the time series + + endYear : int + The end year + + """ + if regionGroup in self.masksSubtasks: + masksSubtask = self.masksSubtasks[regionGroup] + else: + masksSubtask = regionMasksTask.add_mask_subtask(regionGroup) + self.masksSubtasks[regionGroup] = masksSubtask + + if regionGroup not in self.computeSubtasks: + self.computeSubtasks[regionGroup] = dict() + if regionGroup not in self.combineSubtasks: + self.combineSubtasks[regionGroup] = dict() + + timeSeriesName = masksSubtask.outFileSuffix + + if seasons is None: + seasons = [] + + key = (startYear, endYear) + years = range(startYear, endYear + 1) + if key in self.combineSubtasks[regionGroup]: + combineSubtask = self.combineSubtasks[regionGroup][key] + # add any missing fields and seasons + _update_fields(combineSubtask.fields, fields) + combineSubtask.seasons = list(set(seasons + combineSubtask.seasons)) + else: + combineSubtask = 
CombineRegionalProfileTimeSeriesSubtask( + self, regionGroup, timeSeriesName, seasons, fields, + startYears=years, endYears=years) + self.combineSubtasks[regionGroup][key] = combineSubtask # run one subtask per year for year in years: - computeSubtask = ComputeRegionalProfileTimeSeriesSubtask( - self, startYear=year, endYear=year) - computeSubtask.run_after(masksSubtask) - combineSubtask.run_after(computeSubtask) - - if plotHovmoller: - for field in self.fields: - prefix = field['prefix'] - for regionName in self.regionNames: - subtaskName = 'plotHovmoller_{}_{}'.format( - prefix, regionName.replace(' ', '_')) - inFileName = \ - '{}/regionalProfiles_{}_{:04d}-{:04d}.nc'.format( - self.regionMaskSuffix, self.regionMaskSuffix, - self.startYear, self.endYear) - titleName = field['titleName'] - caption = 'Time series of {} {} vs ' \ - 'depth'.format(regionName.replace('_', ' '), - titleName) - hovmollerSubtask = PlotHovmollerSubtask( - parentTask=self, - regionName=regionName, - inFileName=inFileName, - outFileLabel='{}_hovmoller'.format(prefix), - fieldNameInTitle=titleName, - mpasFieldName='{}_mean'.format(prefix), - unitsLabel=field['units'], - sectionName='{}OceanRegionalHovmoller'.format(prefix), - thumbnailSuffix='', - imageCaption=caption, - galleryGroup=hovmollerGalleryGroup, - groupSubtitle=None, - groupLink='ocnreghovs', - galleryName=titleName, - subtaskName=subtaskName, - controlConfig=controlConfig, - regionMaskFile=masksFile) - hovmollerSubtask.run_after(combineSubtask) - self.add_subtask(hovmollerSubtask) - - for field in self.fields: - prefix = field['prefix'] - for regionName in self.regionNames: - for season in self.seasons: - plotSubtask = PlotRegionalProfileTimeSeriesSubtask( - self, season, regionName, field, controlConfig) - plotSubtask.run_after(combineSubtask) - self.add_subtask(plotSubtask) + key = (year, year) + if key in self.computeSubtasks[regionGroup]: + computeSubtask = self.computeSubtasks[regionGroup][key] + 
_update_fields(computeSubtask.fields, fields) + else: + computeSubtask = ComputeRegionalProfileTimeSeriesSubtask( + self, masksSubtask, regionGroup, regionNames, fields, + startYear=year, endYear=year) + computeSubtask.run_after(masksSubtask) + combineSubtask.run_after(computeSubtask) + self.computeSubtasks[regionGroup][key] = computeSubtask - # }}} # }}} class ComputeRegionalProfileTimeSeriesSubtask(AnalysisTask): # {{{ - ''' + """ Compute regional statistics on each layer and time point of a set of MPAS fields @@ -174,51 +183,68 @@ class ComputeRegionalProfileTimeSeriesSubtask(AnalysisTask): # {{{ startYear, endYear : int The beginning and end of the time series to compute - ''' + """ # Authors # ------- # Xylar Asay-Davis - def __init__(self, parentTask, startYear, endYear): # {{{ - ''' + def __init__(self, parentTask, masksSubtask, regionGroup, regionNames, + fields, startYear, endYear): # {{{ + """ Construct the analysis task. Parameters ---------- - parentTask : ``OceanRegionalProfiles`` + parentTask : mpas_analysis.ocean.OceanRegionalProfiles The main task of which this is a subtask + masksSubtask : mpas_analysis.shared.regions.ComputeRegionMasksSubtask + A task for computing region masks + + regionGroup : str + The name of the region group for which the region masks are defined + + regionNames : list + The list of region names to compute and plot + + fields : list + A list of dictionaries defining the fields to compute profile + time series for + startYear, endYear : int The beginning and end of the time series to compute - ''' + """ # Authors # ------- # Xylar Asay-Davis + subtaskName = 'compute{}Profiles_{:04d}-{:04d}'.format( + regionGroup.replace(' ', ''), startYear, endYear) # first, call the constructor from the base class (AnalysisTask) super(ComputeRegionalProfileTimeSeriesSubtask, self).__init__( config=parentTask.config, taskName=parentTask.taskName, componentName=parentTask.componentName, tags=parentTask.tags, - 
subtaskName='computeRegionalProfileTimeSeries_{:04d}-{:04d}' - ''.format(startYear, endYear)) + subtaskName=subtaskName) parentTask.add_subtask(self) - self.parentTask = parentTask + self.masksSubtask = masksSubtask + self.regionNames = regionNames + self.fields = fields self.startYear = startYear self.endYear = endYear # }}} def setup_and_check(self): # {{{ - ''' + """ Perform steps to set up the analysis and check for errors in the setup. Raises ------ ValueError if timeSeriesStatsMonthly is not enabled in the MPAS run - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -237,9 +263,9 @@ def setup_and_check(self): # {{{ # }}} def run_task(self): # {{{ - ''' + """ Compute time series of regional profiles - ''' + """ # Authors # ------- # Milena Veneziani, Mark Petersen, Phillip J. Wolfram, Xylar Asay-Davis @@ -249,7 +275,7 @@ def run_task(self): # {{{ startDate = '{:04d}-01-01_00:00:00'.format(self.startYear) endDate = '{:04d}-12-31_23:59:59'.format(self.endYear) - timeSeriesName = self.parentTask.regionMaskSuffix + timeSeriesName = self.masksSubtask.outFileSuffix outputDirectory = '{}/{}/'.format( build_config_full_path(self.config, 'output', @@ -271,7 +297,7 @@ def run_task(self): # {{{ self.historyStreams, 'timeSeriesStatsMonthlyOutput') - variableList = [field['mpas'] for field in self.parentTask.fields] + variableList = [field['mpas'] for field in self.fields] outputExists = os.path.exists(outputFileName) outputValid = outputExists @@ -313,14 +339,14 @@ def run_task(self): # {{{ vertMask = vertIndex < dsRestart.maxLevelCell # get region masks - regionMaskFileName = self.parentTask.masksSubtask.maskFileName + regionMaskFileName = self.masksSubtask.maskFileName dsRegionMask = xr.open_dataset(regionMaskFileName) # figure out the indices of the regions to plot regionNames = decode_strings(dsRegionMask.regionNames) regionIndices = [] - for regionToPlot in self.parentTask.regionNames: + for regionToPlot in self.regionNames: for index, regionName in 
enumerate(regionNames): if regionToPlot == regionName: regionIndices.append(index) @@ -351,7 +377,7 @@ def run_task(self): # {{{ # for each region and variable, compute area-weighted sum and # squared sum - for field in self.parentTask.fields: + for field in self.fields: variableName = field['mpas'] prefix = field['prefix'] self.logger.info(' {}'.format(field['titleName'])) @@ -376,9 +402,9 @@ def run_task(self): # {{{ dsOut.coords['regionNames'] = regionNamesVar dsOut['totalArea'] = totalArea - dsOut.coords['year'] = (('Time'), years) + dsOut.coords['year'] = (('Time',), years) dsOut['year'].attrs['units'] = 'years' - dsOut.coords['month'] = (('Time'), months) + dsOut.coords['month'] = (('Time',), months) dsOut['month'].attrs['units'] = 'months' # Note: restart file, not a mesh file because we need refBottomDepth, @@ -395,7 +421,7 @@ def run_task(self): # {{{ z[0] = -0.5 * depths[0] z[1:] = -0.5 * (depths[0:-1] + depths[1:]) - dsOut.coords['z'] = (('nVertLevels'), z) + dsOut.coords['z'] = (('nVertLevels',), z) dsOut['z'].attrs['units'] = 'meters' write_netcdf(dsOut, outputFileName) @@ -404,15 +430,16 @@ def run_task(self): # {{{ class CombineRegionalProfileTimeSeriesSubtask(AnalysisTask): # {{{ - ''' + """ Combine individual time series into a single data set - ''' + """ # Authors # ------- # Xylar Asay-Davis - def __init__(self, parentTask, startYears, endYears): # {{{ - ''' + def __init__(self, parentTask, regionGroup, timeSeriesName, seasons, fields, + startYears, endYears): # {{{ + """ Construct the analysis task. 
Parameters @@ -420,36 +447,50 @@ def __init__(self, parentTask, startYears, endYears): # {{{ parentTask : ``OceanRegionalProfiles`` The main task of which this is a subtask - startYear, endYear : list of int + regionGroup : str + The name of the region group for which the region masks are defined + + seasons : list + A list of seasons to compute statistic on + + fields : list + A list of dictionaries defining the fields to compute profile + time series for + + startYears, endYears : list The beginning and end of each time series to combine - ''' + """ # Authors # ------- # Xylar Asay-Davis + subtaskName = 'combine{}Profiles_{:04d}-{:04d}'.format( + regionGroup.replace(' ', ''), startYears[0], endYears[-1]) # first, call the constructor from the base class (AnalysisTask) super(CombineRegionalProfileTimeSeriesSubtask, self).__init__( config=parentTask.config, taskName=parentTask.taskName, componentName=parentTask.componentName, tags=parentTask.tags, - subtaskName='combineRegionalProfileTimeSeries') + subtaskName=subtaskName) parentTask.add_subtask(self) - self.parentTask = parentTask self.startYears = startYears self.endYears = endYears + self.timeSeriesName = timeSeriesName + self.seasons = seasons + self.fields = fields # }}} def run_task(self): # {{{ - ''' + """ Combine the time series - ''' + """ # Authors # ------- # Xylar Asay-Davis - timeSeriesName = self.parentTask.regionMaskSuffix + timeSeriesName = self.timeSeriesName outputDirectory = '{}/{}/'.format( build_config_full_path(self.config, 'output', @@ -461,6 +502,7 @@ def run_task(self): # {{{ self.endYears[-1]) useExisting = False + ds = None if os.path.exists(outputFileName): ds = xr.open_dataset(outputFileName, decode_times=False) if ds.sizes['Time'] > 0: @@ -495,7 +537,7 @@ def run_task(self): # {{{ make_directories(outputDirectory) - for season in self.parentTask.seasons: + for season in self.seasons: outputFileName = \ '{}/{}_{}_{:04d}-{:04d}.nc'.format( outputDirectory, timeSeriesName, season, @@ 
-506,7 +548,7 @@ def run_task(self): # {{{ calendar=self.calendar, maskVaries=False) - for field in self.parentTask.fields: + for field in self.fields: prefix = field['prefix'] mean = dsSeason['{}_mean'.format(prefix)].where( @@ -528,7 +570,7 @@ def run_task(self): # {{{ class PlotRegionalProfileTimeSeriesSubtask(AnalysisTask): # {{{ - ''' + """ Plot a profile averaged over an ocean region and in time, along with variability in both space and time. @@ -548,14 +590,15 @@ class PlotRegionalProfileTimeSeriesSubtask(AnalysisTask): # {{{ controlConfig : ``MpasAnalysisConfigParser`` Configuration options for a control run (if any) - ''' + """ # Authors # ------- # Xylar Asay-Davis - def __init__(self, parentTask, season, regionName, field, controlConfig): + def __init__(self, parentTask, masksSubtask, season, regionName, field, + timeSeriesName, startYear, endYear, controlConfig): # {{{ - ''' + """ Construct the analysis task. Parameters @@ -563,6 +606,9 @@ def __init__(self, parentTask, season, regionName, field, controlConfig): parentTask : OceanRegionalProfiles The parent task of which this is a subtask + masksSubtask : mpas_analysis.shared.regions.ComputeRegionMasksSubtask + A task for computing region masks + season : str The season being plotted @@ -572,9 +618,16 @@ def __init__(self, parentTask, season, regionName, field, controlConfig): field : dict Information about the field (e.g. 
temperature) being plotted + timeSeriesName : str + The name of the time series, related to the name of the region + group but appropriate for a file prefix or suffix + + startYear, endYear : int + The beginning and end of the time series to compute + controlConfig : ``MpasAnalysisConfigParser``, optional Configuration options for a control run (if any) - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -590,19 +643,23 @@ def __init__(self, parentTask, season, regionName, field, controlConfig): tags=parentTask.tags, subtaskName=subtaskName) - self.parentTask = parentTask self.controlConfig = controlConfig + self.masksSubtask = masksSubtask + self.timeSeriesName = timeSeriesName + self.startYear = startYear + self.endYear = endYear self.season = season self.regionName = regionName self.field = field - + self.filePrefix = None + self.xmlFileNames = [] # }}} def setup_and_check(self): # {{{ - ''' + """ Perform steps to set up the analysis and check for errors in the setup. - ''' + """ # Authors # ------- # Xylar Asay-Davis @@ -614,13 +671,10 @@ def setup_and_check(self): # {{{ # self.calendar super(PlotRegionalProfileTimeSeriesSubtask, self).setup_and_check() - self.xmlFileNames = [] - self.filePrefixes = {} - self.filePrefix = 'regionalProfile_{}_{}_{}_years{:04d}-{:04d}'.format( self.field['prefix'], self.regionName.replace(' ', '_'), - self.season, self.parentTask.startYear, - self.parentTask.endYear) + self.season, self.startYear, + self.endYear) self.xmlFileNames = ['{}/{}.xml'.format(self.plotsDirectory, self.filePrefix)] # }}} @@ -634,10 +688,10 @@ def run_task(self): # {{{ # Xylar Asay-Davis config = self.config - startYear = self.parentTask.startYear - endYear = self.parentTask.endYear + startYear = self.startYear + endYear = self.endYear - regionMaskFile = self.parentTask.masksSubtask.geojsonFileName + regionMaskFile = self.masksSubtask.geojsonFileName fcAll = read_feature_collection(regionMaskFile) @@ -649,10 +703,14 @@ def run_task(self): # {{{ 
inDirectory = build_config_full_path(config, 'output', 'profilesSubdirectory') - timeSeriesName = self.parentTask.regionMaskSuffix + timeSeriesName = self.timeSeriesName inFileName = '{}/{}_{}_{:04d}-{:04d}.nc'.format( inDirectory, timeSeriesName, self.season, - self.parentTask.startYear, self.parentTask.endYear) + self.startYear, self.endYear) + + regionGroup = self.masksSubtask.regionGroup + regionGroupSection = 'profiles{}'.format( + regionGroup.replace(' ', '')) ds = xr.open_dataset(inFileName) allRegionNames = decode_strings(ds.regionNames) @@ -663,7 +721,7 @@ def run_task(self): # {{{ stdFieldName = '{}_std'.format(self.field['prefix']) mainRunName = config.get('runs', 'mainRunName') - profileGalleryGroup = config.get('oceanRegionalProfiles', + profileGalleryGroup = config.get(regionGroupSection, 'profileGalleryGroup') titleFieldName = self.field['titleName'] @@ -726,8 +784,7 @@ def run_task(self): # {{{ fieldArrays.append(dsControl[meanFieldName].values) errArrays.append(dsControl[stdFieldName].values) - depthRange = config.getExpression('oceanRegionalProfiles', - 'depthRange') + depthRange = config.getExpression(regionGroupSection, 'depthRange') if len(depthRange) == 0: depthRange = None @@ -863,4 +920,21 @@ def plot(self, zArrays, fieldArrays, errArrays, lineColors, lineWidths, # }}} + +def _update_fields(fields, newFields): + for outer in range(len(newFields)): + found = False + for inner in range(len(fields)): + if fields[inner]['prefix'] == newFields[outer]['prefix']: + for item in ['mpas', 'units', 'titleName']: + if fields[inner][item] != newFields[outer][item]: + raise ValueError( + 'item {} in fields is not consistent between ' + 'profiles and Hovmoller tasks, which will have ' + 'unexpected consequences') + found = True + break + if not found: + fields.append(newFields[outer]) + # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python From 64e8c7d5eb727f09f1d02ac555ab0ad965df8b4d Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 28 Nov 
2020 13:36:24 +0100 Subject: [PATCH 57/58] Support regional hovmoller anomalies Anomalies are with respect to the beginning of the time series, since it is currently not easy to reference to another year. --- configs/polarRegions.conf | 9 + mpas_analysis/config.default | 21 +- .../ocean/hovmoller_ocean_regions.py | 216 ++++++++++++++++-- .../ocean/ocean_regional_profiles.py | 6 +- mpas_analysis/ocean/plot_hovmoller_subtask.py | 2 +- mpas_analysis/shared/time_series/anomaly.py | 4 +- 6 files changed, 224 insertions(+), 34 deletions(-) diff --git a/configs/polarRegions.conf b/configs/polarRegions.conf index 859f9db12..99f6cd085 100644 --- a/configs/polarRegions.conf +++ b/configs/polarRegions.conf @@ -269,6 +269,15 @@ regionNames = ["Southern Ocean 60S", "Weddell Sea Shelf", "Western Ross Sea Deep", "East Antarctic Seas Shelf", "East Antarctic Seas Deep"] +# whether to compute an anomaly with respect to the start of the time series +computeAnomaly = False + +# Number of points over which to compute moving average(e.g., for monthly +# output, movingAverageMonths=12 corresponds to a 12-month moving average +# window) +movingAverageMonths = 12 + + [hovmollerOceanRegionsPotentialTemperature] ## options related to plotting time series of temperature vs. depth in ocean ## regions diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 9df61fc15..41038f2cf 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -3006,16 +3006,19 @@ regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", "Global Ocean", "Global Ocean 65N to 65S", "Global Ocean 15S to 15N"] - -[hovmollerOceanRegionsPotentialTemperature] -## options related to plotting time series of temperature vs. 
depth in ocean -## regions +# whether to compute an anomaly with respect to the start of the time series +computeAnomaly = False # Number of points over which to compute moving average(e.g., for monthly # output, movingAverageMonths=12 corresponds to a 12-month moving average # window) movingAverageMonths = 12 + +[hovmollerOceanRegionsPotentialTemperature] +## options related to plotting time series of temperature vs. depth in ocean +## regions + # colormap colormapNameResult = RdYlBu_r # whether the colormap is indexed or continuous @@ -3056,11 +3059,6 @@ contourLevels = 'none' ## options related to plotting time series of salinity vs. depth in ocean ## regions -# Number of points over which to compute moving average(e.g., for monthly -# output, movingAverageMonths=12 corresponds to a 12-month moving average -# window) -movingAverageMonths = 12 - # colormap colormapNameResult = haline # whether the colormap is indexed or continuous @@ -3101,11 +3099,6 @@ contourLevels = 'none' ## options related to plotting time series of potential density vs. 
depth in ## ocean regions -# Number of points over which to compute moving average(e.g., for monthly -# output, movingAverageMonths=12 corresponds to a 12-month moving average -# window) -movingAverageMonths = 12 - # colormap colormapNameResult = Spectral_r # whether the colormap is indexed or continuous diff --git a/mpas_analysis/ocean/hovmoller_ocean_regions.py b/mpas_analysis/ocean/hovmoller_ocean_regions.py index 5a6a4a489..7b92d4d4d 100644 --- a/mpas_analysis/ocean/hovmoller_ocean_regions.py +++ b/mpas_analysis/ocean/hovmoller_ocean_regions.py @@ -11,9 +11,25 @@ # https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/master/LICENSE # +import os +import xarray + from mpas_analysis.shared import AnalysisTask from mpas_analysis.ocean.plot_hovmoller_subtask import PlotHovmollerSubtask +from mpas_analysis.shared.io import write_netcdf + +from mpas_analysis.shared.timekeeping.utility import \ + get_simulation_start_time, string_to_datetime + +from mpas_analysis.shared.timekeeping.MpasRelativeDelta import \ + MpasRelativeDelta + +from mpas_analysis.shared.io.utility import build_config_full_path + +from mpas_analysis.shared.time_series import \ + compute_moving_avg_anomaly_from_start + class HovmollerOceanRegions(AnalysisTask): # {{{ """ @@ -75,11 +91,14 @@ def __init__(self, config, regionMasksTask, oceanRegionalProfilesTask, if len(regionNames) == 0: return + computeAnomaly = config.getboolean(regionGroupSection, + 'computeAnomaly') + fields = config.getExpression(regionGroupSection, 'fields') masksSubtask = regionMasksTask.add_mask_subtask(regionGroup) masksFile = masksSubtask.geojsonFileName - timeSeriesName = masksSubtask.outFileSuffix + timeSeriesName = regionGroup.replace(' ', '') regionNames = masksSubtask.expand_region_names(regionNames) @@ -92,20 +111,60 @@ def __init__(self, config, regionMasksTask, oceanRegionalProfilesTask, combineSubtask = oceanRegionalProfilesTask.combineSubtasks[ regionGroup][(startYear, endYear)] + movingAverageMonths = 
config.getint( + regionGroupSection, 'movingAverageMonths') + + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + # PlotHovmollerSubtask requires a relative path + inFileName = \ + '{}/regionalProfiles_{}_{:04d}-{:04d}.nc'.format( + timeSeriesName, timeSeriesName, + startYear, endYear) + if computeAnomaly: + inFullPath = '{}/{}'.format(baseDirectory, inFileName) + outFileName = \ + '{}/anomaly_{}_{:04d}-{:04d}.nc'.format( + timeSeriesName, timeSeriesName, + startYear, endYear) + outFullPath = '{}/{}'.format(baseDirectory, outFileName) + anomalySubtask = ComputeHovmollerAnomalySubtask( + self, inFullPath, outFullPath, movingAverageMonths) + self.add_subtask(anomalySubtask) + anomalySubtask.run_after(combineSubtask) + # PlotHovmollerSubtask requires a relative path + inFileName = outFileName + else: + anomalySubtask = None + for field in fields: prefix = field['prefix'] suffix = prefix[0].upper() + prefix[1:] + fieldSectionName = 'hovmollerOceanRegions{}'.format(suffix) + + config.set(fieldSectionName, 'movingAverageMonths', + '{}'.format(movingAverageMonths)) + for regionName in regionNames: + if computeAnomaly: + titleName = '{} Anomaly'.format(field['titleName']) + caption = 'Anomaly of {} {} vs ' \ + 'depth'.format(regionName.replace('_', ' '), + titleName) + galleryGroup = '{} Anomaly vs Depths'.format( + regionGroup) + else: + titleName = field['titleName'] + anomalySubtask = None + caption = 'Time series of {} {} vs ' \ + 'depth'.format(regionName.replace('_', ' '), + titleName) + galleryGroup = '{} Time Series vs Depths'.format( + regionGroup) + subtaskName = 'plotHovmoller_{}_{}'.format( prefix, regionName.replace(' ', '_')) - inFileName = \ - '{}/regionalProfiles_{}_{:04d}-{:04d}.nc'.format( - timeSeriesName, timeSeriesName, - startYear, endYear) - titleName = field['titleName'] - caption = 'Time series of {} {} vs ' \ - 'depth'.format(regionName.replace('_', ' '), - titleName) hovmollerSubtask = PlotHovmollerSubtask( 
parentTask=self, regionName=regionName, @@ -114,21 +173,150 @@ def __init__(self, config, regionMasksTask, oceanRegionalProfilesTask, fieldNameInTitle=titleName, mpasFieldName='{}_mean'.format(prefix), unitsLabel=field['units'], - sectionName='hovmollerOceanRegions{}'.format(suffix), + sectionName=fieldSectionName, thumbnailSuffix='', imageCaption=caption, - galleryGroup='{} Time Series vs Depths'.format( - regionGroup), + galleryGroup=galleryGroup, groupSubtitle=None, - groupLink='ocnreghovs', + groupLink='ocnreghovs_{}'.format( + regionGroup.replace(' ', '').lower()), galleryName=titleName, subtaskName=subtaskName, controlConfig=controlConfig, regionMaskFile=masksFile) - hovmollerSubtask.run_after(combineSubtask) + if computeAnomaly: + hovmollerSubtask.run_after(anomalySubtask) + else: + hovmollerSubtask.run_after(combineSubtask) self.add_subtask(hovmollerSubtask) + self.run_after(oceanRegionalProfilesTask) + # }}} + # }}} + + +class ComputeHovmollerAnomalySubtask(AnalysisTask): + """ + A subtask for computing anomalies of moving averages and writing them out. + + Attributes + ---------- + inFileName : str + The file name for the time series + + outFileName : str + The file name (usually without full path) where the resulting + data set should be written + + movingAveragePoints : int + The number of points (months) used in the moving average used to + smooth the data set + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, inFileName, outFileName, + movingAverageMonths, subtaskName='computeAnomaly'): # {{{ + """ + Construct the analysis task. 
+ + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task of which this is a subtask + + inFileName : str + The file name for the time series + + outFileName : str + The file name for the anomaly + + movingAverageMonths : int + The number of months used in the moving average used to + smooth the data set + + subtaskName : str, optional + The name of the subtask + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeHovmollerAnomalySubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName='ocean', + tags=parentTask.tags, + subtaskName=subtaskName) + + self.inFileName = inFileName + self.outFileName = outFileName + self.movingAverageMonths = movingAverageMonths + + # }}} + + def setup_and_check(self): # {{{ + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(ComputeHovmollerAnomalySubtask, self).setup_and_check() + + startDate = self.config.get('timeSeries', 'startDate') + endDate = self.config.get('timeSeries', 'endDate') + + delta = MpasRelativeDelta(string_to_datetime(endDate), + string_to_datetime(startDate), + calendar=self.calendar) + + months = delta.months + 12*delta.years + + if months <= self.movingAverageMonths: + raise ValueError('Cannot meaningfully perform a rolling mean ' + 'because the time series is too short.') + # }}} + + def run_task(self): # {{{ + """ + Compute the anomaly of the time series relative to the start of the series.
+ """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Greg Streletz + + self.logger.info("\nComputing anomalies...") + + config = self.config + + ds = xarray.open_dataset(self.inFileName) + + dsStart = ds.isel(Time=slice(0, self.movingAverageMonths)).mean('Time') + + for variable in ds.data_vars: + ds[variable] = ds[variable] - dsStart[variable] + + outFileName = self.outFileName + if not os.path.isabs(outFileName): + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + outFileName = '{}/{}'.format(baseDirectory, + outFileName) + + write_netcdf(ds, outFileName) # }}} + # }}} + # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/ocean/ocean_regional_profiles.py b/mpas_analysis/ocean/ocean_regional_profiles.py index 05ef80af0..4813c054b 100644 --- a/mpas_analysis/ocean/ocean_regional_profiles.py +++ b/mpas_analysis/ocean/ocean_regional_profiles.py @@ -98,7 +98,7 @@ def __init__(self, config, regionMasksTask, controlConfig=None): # {{{ masksSubtask = self.masksSubtasks[regionGroup] - timeSeriesName = masksSubtask.outFileSuffix + timeSeriesName = regionGroup.replace(' ', '') for field in fields: for regionName in regionNames: @@ -136,7 +136,7 @@ def add_region_group(self, regionMasksTask, regionGroup, regionNames, if regionGroup not in self.combineSubtasks: self.combineSubtasks[regionGroup] = dict() - timeSeriesName = masksSubtask.outFileSuffix + timeSeriesName = regionGroup.replace(' ', '') if seasons is None: seasons = [] @@ -275,7 +275,7 @@ def run_task(self): # {{{ startDate = '{:04d}-01-01_00:00:00'.format(self.startYear) endDate = '{:04d}-12-31_23:59:59'.format(self.endYear) - timeSeriesName = self.masksSubtask.outFileSuffix + timeSeriesName = self.masksSubtask.regionGroup.replace(' ', '') outputDirectory = '{}/{}/'.format( build_config_full_path(self.config, 'output', diff --git a/mpas_analysis/ocean/plot_hovmoller_subtask.py b/mpas_analysis/ocean/plot_hovmoller_subtask.py index 
46b40a6ed..d495ea009 100644 --- a/mpas_analysis/ocean/plot_hovmoller_subtask.py +++ b/mpas_analysis/ocean/plot_hovmoller_subtask.py @@ -295,7 +295,7 @@ def run_task(self): # {{{ xLabel = 'Time (years)' yLabel = 'Depth (m)' - title = '{}, {}'.format(self.fieldNameInTitle, regionNameInTitle) + title = '{}\n{}'.format(self.fieldNameInTitle, regionNameInTitle) outFileName = '{}/{}.png'.format(self.plotsDirectory, self.filePrefix) diff --git a/mpas_analysis/shared/time_series/anomaly.py b/mpas_analysis/shared/time_series/anomaly.py index 4993f5242..04e2f08f0 100644 --- a/mpas_analysis/shared/time_series/anomaly.py +++ b/mpas_analysis/shared/time_series/anomaly.py @@ -36,8 +36,8 @@ def compute_moving_avg_anomaly_from_start(timeSeriesFileName, variableList, variableList : list of str variable names to include in the resulting data set - simulationStartTime : str - the start date of the simulation + anomalyStartTime, anomalyEndTime : str + the start and end times of the reference point for the anomaly startDate, endDate : str the start and end dates of the time series From 470701416213d5aad6d9fe55212978005f73feba Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 28 Nov 2020 10:38:53 -0600 Subject: [PATCH 58/58] Adjust suptitle position for vertical section plots --- mpas_analysis/shared/plot/vertical_section.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mpas_analysis/shared/plot/vertical_section.py b/mpas_analysis/shared/plot/vertical_section.py index 24606a3c4..3fde5e549 100644 --- a/mpas_analysis/shared/plot/vertical_section.py +++ b/mpas_analysis/shared/plot/vertical_section.py @@ -482,9 +482,9 @@ def plot_vertical_section_comparison( if thirdXAxisData is not None and refArray is None: plt.tight_layout(pad=0.0, h_pad=2.0, rect=[0.0, 0.0, 1.0, 0.98]) else: - plt.tight_layout(pad=0.0, h_pad=2.0, rect=[0.0, 0.0, 1.0, 0.9]) + plt.tight_layout(pad=0.0, h_pad=2.0, rect=[0.0, 0.0, 1.0, 0.95]) else: - plt.tight_layout(pad=0.0, h_pad=2.0, 
rect=[0.01, 0.0, 1.0, 0.93]) + plt.tight_layout(pad=0.0, h_pad=2.0, rect=[0.01, 0.0, 1.0, 0.97]) return fig, axes, suptitle