diff --git a/README.md b/README.md index acdee0681..fa3326b2d 100644 --- a/README.md +++ b/README.md @@ -83,6 +83,33 @@ for more details. `--generate` flag on the command line. See the comments in `mpas_analysis/config.default` for more details on this option. +## MPAS output files needed by MPAS-Analysis + + * mpas-o files: + * `mpaso.hist.am.timeSeriesStatsMonthly.*.nc` (Note: since OHC + anomalies are computed with respect to the first year of the simulation, + if the OHC diagnostic is activated, the analysis will need the + first full year of `mpaso.hist.am.timeSeriesStatsMonthly.*.nc` + files, no matter what `[timeSeries]/startYear` and + `[timeSeries]/endYear` are. This is especially important to know if + short-term archiving is used in the run being analyzed: in that case, set + `[input]/runSubdirectory`, `[input]/oceanHistorySubdirectory` and + `[input]/seaIceHistorySubdirectory` to the appropriate run and archive + directories and choose `[timeSeries]/startYear` and + `[timeSeries]/endYear` to include only data that have been short-term + archived). + * `mpaso.hist.am.meridionalHeatTransport.0001-03-01.nc` (or any + `hist.am.meridionalHeatTransport` file) + * `mpaso.rst.0002-01-01_00000.nc` (or any other mpas-o restart file) + * `streams.ocean` + * `mpas-o_in` + * mpas-cice files: + * `mpascice.hist.am.timeSeriesStatsMonthly.*.nc` + * `mpascice.rst.0002-01-01_00000.nc` (or any other mpas-cice restart + file) + * `streams.cice` + * `mpas-cice_in` + ## Purge Old Analysis To purge old analysis (delete the whole output directory) before running run diff --git a/docs/api.rst b/docs/api.rst index 3623557ec..ff4e7b86e 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -11,12 +11,12 @@ Top-level script: run_mpas_analysis .. autosummary:: :toctree: generated/ - run_mpas_analysis.update_generate - run_mpas_analysis.run_parallel_tasks - run_mpas_analysis.wait_for_task run_mpas_analysis.build_analysis_list run_mpas_analysis.determine_analyses_to_generate + run_mpas_analysis.add_task_and_subtasks + run_mpas_analysis.update_generate run_mpas_analysis.run_analysis + run_mpas_analysis.wait_for_task Analysis tasks @@ -32,7 +32,9 @@ Base Class AnalysisTask AnalysisTask.setup_and_check - AnalysisTask.run_analysis + AnalysisTask.run_task + AnalysisTask.run_after + AnalysisTask.add_subtask AnalysisTask.run AnalysisTask.check_generate AnalysisTask.check_analysis_enabled @@ -109,17 +111,20 @@ Climatology .. autosummary:: :toctree: generated/ - get_lat_lon_comparison_descriptor + get_comparison_descriptor + get_antarctic_stereographic_projection get_remapper - get_mpas_climatology_dir_name get_observation_climatology_file_names compute_monthly_climatology compute_climatology - cache_climatologies - update_climatology_bounds_from_file_names add_years_months_days_in_month - remap_and_write_climatology - compute_climatologies_with_ncclimo + + MpasClimatologyTask + MpasClimatologyTask.add_variables + MpasClimatologyTask.get_file_name + + RemapMpasClimatologySubtask + RemapMpasClimatologySubtask.get_file_name Time Series ----------- diff --git a/mpas_analysis/analysis_task_template.py b/mpas_analysis/analysis_task_template.py index 17dd3bb4a..4145d685c 100644 --- a/mpas_analysis/analysis_task_template.py +++ b/mpas_analysis/analysis_task_template.py @@ -85,7 +85,7 @@ class MyTask(AnalysisTask): # {{{ # def __init__(self, config, fieldName):...
# and you would then make a new task something like this: # myTask = MyTask(config, fieldName='seaIceArea') - def __init__(self, config, myArg='myDefaultValue'): # {{{ + def __init__(self, config, prerequisiteTask, myArg='myDefaultValue'): # {{{ ''' Construct the analysis task. @@ -95,9 +95,15 @@ def __init__(self, config, myArg='myDefaultValue'): # {{{ config : instance of MpasAnalysisConfigParser Contains configuration options + prerequisiteTask : ``AnotherTaskClass`` + Another task that must run before this one (a prerequisite of this task) + + myArg : str, optional + Authors ------- @@ -136,6 +142,40 @@ def __init__(self, config, myArg='myDefaultValue'): # {{{ # Example: # self.fieldName = fieldName + # If you need to, you can add a task that needs to run before this task + # (a prerequisite), just add it as follows: + self.prerequisiteTask = prerequisiteTask + self.run_after(prerequisiteTask) + + # You may want to break this task into several subtasks that can run + # in parallel with one another or one after the other, depending on + # how you set them up. + # + # An example where the subtasks run in parallel with one another: + for season in ['JFM', 'JAS', 'ANN']: + subtask = MySubtask(parentTask=self, season=season) + self.add_subtask(subtask) + + # An example where the subtasks run in sequence because each one + # depends on the previous one + remapObservations = MyRemapObservationsSubtask(parentTask=self) + # You can make sure MyRemapObservationsSubtask also runs after the + # prerequisite task: + remapObservations.run_after(prerequisiteTask) + self.add_subtask(remapObservations) + + plotObservations = MyPlotObservationsSubtask( + remapObservations=remapObservations) + # This is the part that makes sure MyPlotObservationsSubtask runs after + # MyRemapObservationsSubtask. Note: you might do this inside of + # MyPlotObservationsSubtask instead of here. + plotObservations.run_after(remapObservations) + self.add_subtask(plotObservations) + + # Note: I have not included stubs for MyRemapObservationsSubtask and + # MyPlotObservationsSubtask but they would be qualitatively similar + # to MySubtask below. + # }}} # this function will be called to figure out if the analysis task should @@ -349,10 +389,35 @@ def _make_plot(self, plotParameter, optionalArgument=None): # {{{ imageDescription=caption, imageCaption=caption) - - # # }}} + +class MySubtask(AnalysisTask): + def __init__(self, parentTask, season): + self.parentTask = parentTask + self.season = season + super(MySubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + subtaskName=season, + componentName=parentTask.componentName, + tags=parentTask.tags) + + def setup_and_check(self): + # do whatever setup is needed for the subtask. You don't have + # to redundantly do setup that happened in parentTask because + # you can access its fields if needed + assert(self.parentTask.streamName == + 'timeSeriesStatsMonthlyOutput') + + def run_task(self): + # do the main action of the subtask. Note: you can't access any + # fields created when parentTask runs for two reasons: 1) parentTask + # runs after this task and 2) parentTask and all other tasks may + # run in a separate process from this task so the data will not be + # communicated to this process.
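+ # Data that this subtask needs from other tasks should therefore be + # passed through files rather than through in-memory fields. A + # hypothetical sketch (the attribute and file names here are + # placeholders, not part of this template): + # fileName = self.parentTask.prerequisiteTask.outputFileName + # dsPrerequisite = xarray.open_dataset(fileName)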
+ pass + # }}} # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/config.default b/mpas_analysis/config.default index 1959c0721..05dfeb97d 100644 --- a/mpas_analysis/config.default +++ b/mpas_analysis/config.default @@ -522,7 +522,10 @@ colorbarLevelsDifference = [-5, -3, -2, -1, 0, 1, 2, 3, 5] # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, # Nov, Dec, JFM, AMJ, JAS, OND, ANN) -comparisonTimes = ['JFM', 'JAS', 'ANN'] +seasons = ['JFM', 'JAS', 'ANN'] + +# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis +comparisonGrids = ['latlon'] [climatologyMapSSS] ## options related to plotting horizontally remapped climatologies of @@ -544,7 +547,10 @@ colorbarLevelsDifference = [-3, -2, -1, -0.5, 0, 0.5, 1, 2, 3] # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, # Nov, Dec, JFM, AMJ, JAS, OND, ANN) -comparisonTimes = ['JFM', 'JAS', 'ANN'] +seasons = ['JFM', 'JAS', 'ANN'] + +# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis +comparisonGrids = ['latlon'] [climatologyMapMLD] ## options related to plotting horizontally remapped climatologies of @@ -566,7 +572,10 @@ colorbarLevelsDifference = [-150, -80, -30, -10, 0, 10, 30, 80, 150] # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, # Nov, Dec, JFM, AMJ, JAS, OND, ANN) -comparisonTimes = ['JFM', 'JAS', 'ANN'] +seasons = ['JFM', 'JAS', 'ANN'] + +# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis +comparisonGrids = ['latlon'] [climatologyMapSeaIceConcNH] ## options related to plotting horizontally remapped climatologies of diff --git a/mpas_analysis/ocean/climatology_map.py b/mpas_analysis/ocean/climatology_map.py index e0eb148d0..b58a70fae 100644 --- a/mpas_analysis/ocean/climatology_map.py +++ b/mpas_analysis/ocean/climatology_map.py @@ -20,16 +20,14 @@ from ..shared.constants import constants from ..shared.io.utility import build_config_full_path, make_directories -from ..shared.io import write_netcdf from ..shared.html import write_image_xml -from ..shared.climatology import get_lat_lon_comparison_descriptor, \ - get_remapper, get_mpas_climatology_dir_name, \ - get_observation_climatology_file_names, compute_climatology, \ - remap_and_write_climatology, update_climatology_bounds_from_file_names, \ - compute_climatologies_with_ncclimo, get_ncclimo_season_file_name +from ..shared.climatology import get_comparison_descriptor, \ + get_remapper, get_observation_climatology_file_names, \ + compute_climatology, remap_and_write_climatology, \ + RemapMpasClimatologySubtask -from ..shared.grid import MpasMeshDescriptor, LatLonGridDescriptor +from ..shared.grid import LatLonGridDescriptor from ..shared.mpas_xarray import mpas_xarray @@ -40,11 +38,132 @@ class ClimatologyMapOcean(AnalysisTask): # {{{ """ An analysis task for comparison of 2D model fields against observations. + Attributes + ---------- + fieldName : str + A short name of the field being analyzed + + fieldNameInTitle : str + An equivalent name of the field appropriate for figure titles + + mpasFieldName : str + The name of the MPAS timeSeriesStatsMonthly variable to be analyzed + + iselValues : dict + A dictionary of dimensions and indices (or ``None``) used to extract + a slice of the MPAS field. + + obsFileName : str + A file containing observations from which to construct seasonal + climatologies.
+ + obsFieldName : str + A variable in the observations file to plot + + observationTitleLabel : str + A label on the subplot showing observations + + outFileLabel : str + A prefix for the resulting image file. + + unitsLabel : str + The units for the field being plotted. + + galleryGroup : str + In the generated website, the name of the group of analysis galleries + to which this analysis belongs + + groupLink : str + the link within the component webpage used to identify the gallery + group (a short version of the gallery group name with no spaces or + punctuation) + + groupSubtitle : str + a subtitle for the gallery group + + galleryName : str, optional + the name of the gallery (or possibly a subtitle for the gallery group + if there is only one gallery in the group) + + remapClimatologySubtask : ``RemapMpasClimatologySubtask`` + The subtask that remaps the climatologies this task will plot. + The ``remapClimatologySubtask`` is needed to determine the file names + of the climatology output. + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + Authors ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ + def __init__(self, config, mpasClimatologyTask, taskName, tags): # {{{ + ''' + Construct one analysis subtask for each plot (i.e. each season and + comparison grid) and a subtask for computing climatologies. + + Parameters + ---------- + config : instance of MpasAnalysisConfigParser + Contains configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + taskName : str + The name of the task, typically the same as the subclass name + except starting with lowercase (e.g. 'climatologyMapSST' for class + 'ClimatologyMapSST') + + tags : list of str + Tags used to describe the task (e.g. 'climatology', + 'horizontalMap'). These are used to determine which tasks are + generated (e.g. 'all_transect' or 'no_climatology' in the + 'generate' flags) + + Authors + ------- + Xylar Asay-Davis + + ''' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapOcean, self).__init__(config=config, + taskName=taskName, + componentName='ocean', + tags=tags) + + self.mpasClimatologyTask = mpasClimatologyTask + + sectionName = self.taskName + + # read in what seasons we want to plot + seasons = config.getExpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + # comparisonGridNames = config.getExpression(sectionName, + # 'comparisonGrids') + + # if len(comparisonGridNames) == 0: + # raise ValueError('config section {} does not contain valid list ' + # 'of comparison grids'.format(sectionName)) + + # the variable self.mpasFieldName will be added to mpasClimatologyTask + # along with the seasons. + self.remapClimatologySubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=self.mpasClimatologyTask, + parentTask=self, + climatologyName=self.fieldName, + variableList=[self.mpasFieldName], + # comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=self.iselValues) + + # }}} + def setup_and_check(self): # {{{ """ Perform steps to set up the analysis and check for errors in the setup. 
@@ -61,80 +180,22 @@ def setup_and_check(self): # {{{ super(ClimatologyMapOcean, self).setup_and_check() config = self.config - fieldName = self.fieldName + self.startYear = self.mpasClimatologyTask.startYear + self.startDate = self.mpasClimatologyTask.startDate + self.endYear = self.mpasClimatologyTask.endYear + self.endDate = self.mpasClimatologyTask.endDate - self.check_analysis_enabled( - analysisOptionName='config_am_timeseriesstatsmonthly_enable', - raiseException=True) - - # get a list of timeSeriesStats output files from the streams file, - # reading only those that are between the start and end dates - startDate = config.get('climatology', 'startDate') - endDate = config.get('climatology', 'endDate') - streamName = 'timeSeriesStatsMonthlyOutput' - self.inputFiles = self.historyStreams.readpath( - streamName, startDate=startDate, endDate=endDate, - calendar=self.calendar) - - if len(self.inputFiles) == 0: - raise IOError('No files were found in stream {} between {} and ' - '{}.'.format(streamName, startDate, endDate)) - - changed, self.startYear, self.endYear, self.startDate, self.endDate = \ - update_climatology_bounds_from_file_names(self.inputFiles, - config) mainRunName = config.get('runs', 'mainRunName') - comparisonTimes = config.getExpression(self.taskName, - 'comparisonTimes') - - try: - self.restartFileName = self.runStreams.readpath('restart')[0] - except ValueError: - raise IOError('No MPAS-O restart file found: need at least one ' - 'restart file for ocn_modelvsobs calculation') + seasons = config.getExpression(self.taskName, 'seasons') - # make reamppers - mappingFilePrefix = 'map' - comparisonDescriptor = get_lat_lon_comparison_descriptor(config) - self.comparisonGridName = comparisonDescriptor.meshName - mpasDescriptor = MpasMeshDescriptor( - self.restartFileName, meshName=config.get('input', 'mpasMeshName')) - self.mpasMeshName = mpasDescriptor.meshName - - self.mpasRemapper = get_remapper( - config=config, sourceDescriptor=mpasDescriptor, - comparisonDescriptor=comparisonDescriptor, - mappingFilePrefix=mappingFilePrefix, - method=config.get('climatology', 'mpasInterpolationMethod'), - logger=self.logger) - - obsDescriptor = LatLonGridDescriptor.read(fileName=self.obsFileName, - latVarName='lat', - lonVarName='lon') - - origObsRemapper = Remapper(comparisonDescriptor, obsDescriptor) - - season = comparisonTimes[0] - - # now the observations - (climatologyFileName, remappedFileName) = \ - get_observation_climatology_file_names( - config=config, fieldName=fieldName, monthNames=season, - componentName='ocean', remapper=origObsRemapper) - - # make the remapper for the climatology - self.obsRemapper = get_remapper( - config=config, sourceDescriptor=obsDescriptor, - comparisonDescriptor=comparisonDescriptor, - mappingFilePrefix='map_obs_{}'.format(fieldName), - method=config.get('oceanObservations', - 'interpolationMethod'), - logger=self.logger) + # we set up the remapper here because ESMF_RegridWeightGen seems to + # have trouble if it runs in another process (or in several at once) + self._setup_obs_remapper() self.xmlFileNames = [] self.filePrefixes = {} - for season in comparisonTimes: + for season in seasons: filePrefix = '{}_{}_{}_years{:04d}-{:04d}'.format( self.outFileLabel, mainRunName, season, self.startYear, self.endYear) @@ -144,9 +205,8 @@ def setup_and_check(self): # {{{ # make the mapping directory, because doing so within each process # seems to be giving ESMF_RegridWeightGen some trouble - mappingSubdirectory = \ - build_config_full_path(self.config,
'output', - 'mappingSubdirectory') + mappingSubdirectory = build_config_full_path(config, 'output', + 'mappingSubdirectory') make_directories(mappingSubdirectory) # }}} @@ -167,109 +227,33 @@ def run_task(self): # {{{ config = self.config fieldName = self.fieldName - self.logger.info('\n Reading files:\n' - ' {} through\n {}'.format( - os.path.basename(self.inputFiles[0]), - os.path.basename(self.inputFiles[-1]))) + try: + restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least one ' + 'restart file for ocn_modelvsobs calculation') - mainRunName = config.get('runs', 'mainRunName') + mainRunName = self.config.get('runs', 'mainRunName') - outputTimes = config.getExpression(self.taskName, 'comparisonTimes') + seasons = config.getExpression(self.taskName, 'seasons') (colormapResult, colorbarLevelsResult) = setup_colormap( config, self.taskName, suffix='Result') (colormapDifference, colorbarLevelsDifference) = setup_colormap( config, self.taskName, suffix='Difference') - climatologyDirectory = \ - get_mpas_climatology_dir_name( - config=config, - fieldName=self.mpasFieldName, - mpasMeshName=self.mpasMeshName) - - (maskedClimatologyDirectory, remappedDirectory) = \ - get_mpas_climatology_dir_name( - config=config, - fieldName='{}_masked'.format(fieldName), - mpasMeshName=self.mpasMeshName, - comparisonGridName=self.comparisonGridName) - - dsRestart = xr.open_dataset(self.restartFileName) + dsRestart = xr.open_dataset(restartFileName) dsRestart = mpas_xarray.subset_variables(dsRestart, ['maxLevelCell']) - startYear = config.getint('climatology', 'startYear') - endYear = config.getint('climatology', 'endYear') - - modelName = 'mpaso' - - allExist = True - for season in outputTimes: - climatologyFileName = get_ncclimo_season_file_name( - climatologyDirectory, modelName, season, self.startYear, - self.endYear) - if not os.path.exists(climatologyFileName): - allExist = False - break - - if not allExist: - - compute_climatologies_with_ncclimo( - config=config, - inDirectory=self.historyDirectory, - outDirectory=climatologyDirectory, - startYear=startYear, - endYear=endYear, - variableList=[self.mpasFieldName], - modelName=modelName, - seasons=outputTimes, - decemberMode='sdd', - logger=self.logger) - dsObs = None # Interpolate and compute biases - for season in outputTimes: + for season in seasons: monthValues = constants.monthDictionary[season] - climatologyFileName = \ - get_ncclimo_season_file_name(climatologyDirectory, modelName, - season, startYear, endYear) - - maskedClimatologyFileName = \ - get_ncclimo_season_file_name(maskedClimatologyDirectory, - modelName, season, startYear, - endYear) - - remappedFileName = \ - get_ncclimo_season_file_name(remappedDirectory, - modelName, season, startYear, - endYear) - - if not os.path.exists(maskedClimatologyFileName): - # slice and mask the data set - climatology = xr.open_dataset(climatologyFileName) - iselValues = {'Time': 0} - if self.iselValues is not None: - iselValues.update(self.iselValues) - # select only Time=0 and possibly only the desired vertical - # slice - climatology = climatology.isel(**iselValues) - - # mask the data set - climatology[self.mpasFieldName] = \ - climatology[self.mpasFieldName].where( - dsRestart.maxLevelCell > 0) - - write_netcdf(climatology, maskedClimatologyFileName) - - if not os.path.exists(remappedFileName): - - self.mpasRemapper.remap_file( - inFileName=maskedClimatologyFileName, - outFileName=remappedFileName, - overwrite=True, - 
logger=self.logger) + remappedFileName = self.remapClimatologySubtask.get_file_name( + season=season, stage='remapped', comparisonGridName='latlon') remappedClimatology = xr.open_dataset(remappedFileName) @@ -352,6 +336,49 @@ def run_task(self): # {{{ # }}} + def _setup_obs_remapper(self): # {{{ + """ + Set up the remapper for remapping from the observation grid to the + comparison grid. + + Authors + ------- + Xylar Asay-Davis + """ + config = self.config + fieldName = self.fieldName + + seasons = config.getExpression(self.taskName, 'seasons') + + # make remappers + comparisonDescriptor = get_comparison_descriptor( + config=config, comparisonGridName='latlon') + self.comparisonGridName = comparisonDescriptor.meshName + + obsDescriptor = LatLonGridDescriptor.read(fileName=self.obsFileName, + latVarName='lat', + lonVarName='lon') + + origObsRemapper = Remapper(comparisonDescriptor, obsDescriptor) + + season = seasons[0] + + # now the observations + (climatologyFileName, remappedFileName) = \ + get_observation_climatology_file_names( + config=config, fieldName=fieldName, monthNames=season, + componentName='ocean', remapper=origObsRemapper) + + # make the remapper for the climatology + self.obsRemapper = get_remapper( + config=config, sourceDescriptor=obsDescriptor, + comparisonDescriptor=comparisonDescriptor, + mappingFilePrefix='map_obs_{}'.format(fieldName), + method=config.get('oceanObservations', + 'interpolationMethod'), + logger=self.logger) + + # }}} # }}} @@ -364,7 +391,7 @@ class ClimatologyMapSST(ClimatologyMapOcean): # {{{ ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ - def __init__(self, config): # {{{ + def __init__(self, config, mpasClimatologyTask): # {{{ """ Construct the analysis task. @@ -373,18 +400,23 @@ def __init__(self, config): # {{{ config : instance of MpasAnalysisConfigParser Contains configuration options + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + Authors ------- Xylar Asay-Davis """ self.fieldName = 'sst' self.fieldNameInTitle = 'SST' + self.mpasFieldName = 'timeMonthly_avg_activeTracers_temperature' + self.iselValues = {'nVertLevels': 0} # call the constructor from the base class (ClimatologyMapOcean) super(ClimatologyMapSST, self).__init__( config=config, + mpasClimatologyTask=mpasClimatologyTask, taskName='climatologyMapSST', - componentName='ocean', tags=['climatology', 'horizontalMap', self.fieldName]) # }}} @@ -408,9 +440,6 @@ def setup_and_check(self): # {{{ "{}/MODEL.SST.HAD187001-198110.OI198111-201203.nc".format( observationsDirectory) - self.mpasFieldName = 'timeMonthly_avg_activeTracers_temperature' - self.iselValues = {'nVertLevels': 0} - self.obsFieldName = 'SST' climStartYear = self.config.getint('oceanObservations', @@ -483,7 +512,7 @@ class ClimatologyMapSSS(ClimatologyMapOcean): # {{{ ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ - def __init__(self, config): # {{{ + def __init__(self, config, mpasClimatologyTask): # {{{ """ Construct the analysis task.
@@ -492,18 +521,23 @@ def __init__(self, config): # {{{ config : instance of MpasAnalysisConfigParser Contains configuration options + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + Authors ------- Xylar Asay-Davis """ self.fieldName = 'sss' self.fieldNameInTitle = 'SSS' + self.mpasFieldName = 'timeMonthly_avg_activeTracers_salinity' + self.iselValues = {'nVertLevels': 0} # call the constructor from the base class (ClimatologyMapOcean) super(ClimatologyMapSSS, self).__init__( config=config, + mpasClimatologyTask=mpasClimatologyTask, taskName='climatologyMapSSS', - componentName='ocean', tags=['climatology', 'horizontalMap', self.fieldName]) # }}} @@ -527,9 +561,6 @@ def setup_and_check(self): # {{{ '{}/Aquarius_V3_SSS_Monthly.nc'.format( observationsDirectory) - self.mpasFieldName = 'timeMonthly_avg_activeTracers_salinity' - self.iselValues = {'nVertLevels': 0} - self.obsFieldName = 'SSS' self.observationTitleLabel = 'Observations (Aquarius, 2011-2014)' @@ -584,7 +615,7 @@ class ClimatologyMapMLD(ClimatologyMapOcean): # {{{ ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ - def __init__(self, config): # {{{ + def __init__(self, config, mpasClimatologyTask): # {{{ """ Construct the analysis task. @@ -593,6 +624,9 @@ def __init__(self, config): # {{{ config : instance of MpasAnalysisConfigParser Contains configuration options + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + Authors ------- Xylar Asay-Davis @@ -600,12 +634,14 @@ def __init__(self, config): # {{{ self.fieldName = 'mld' self.fieldNameInTitle = 'MLD' + self.mpasFieldName = 'timeMonthly_avg_dThreshMLD' + self.iselValues = None # call the constructor from the base class (ClimatologyMapOcean) super(ClimatologyMapMLD, self).__init__( config=config, + mpasClimatologyTask=mpasClimatologyTask, taskName='climatologyMapMLD', - componentName='ocean', tags=['climatology', 'horizontalMap', self.fieldName]) # }}} @@ -629,9 +665,6 @@ def setup_and_check(self): # {{{ '{}/holtetalley_mld_climatology.nc'.format( observationsDirectory) - self.mpasFieldName = 'timeMonthly_avg_dThreshMLD' - self.iselValues = None - self.obsFieldName = 'mld_dt_mean' # Set appropriate MLD figure labels diff --git a/mpas_analysis/ocean/meridional_heat_transport.py b/mpas_analysis/ocean/meridional_heat_transport.py index aeeb7347e..475d39ef1 100644 --- a/mpas_analysis/ocean/meridional_heat_transport.py +++ b/mpas_analysis/ocean/meridional_heat_transport.py @@ -7,15 +7,10 @@ from ..shared.plot.plotting import plot_vertical_section,\ setup_colormap, plot_1D -from ..shared.io.utility import build_config_full_path, make_directories +from ..shared.io.utility import build_config_full_path from ..shared.timekeeping.utility import get_simulation_start_time -from ..shared.climatology.climatology \ - import update_climatology_bounds_from_file_names, \ - compute_climatologies_with_ncclimo, \ - get_ncclimo_season_file_name - from ..shared import AnalysisTask from ..shared.html import write_image_xml @@ -24,12 +19,18 @@ class MeridionalHeatTransport(AnalysisTask): # {{{ ''' Plot meridional heat transport from the analysis member output. 
+ Attributes + ---------- + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + Authors ------- Mark Petersen, Milena Veneziani, Xylar Asay-Davis ''' - def __init__(self, config): # {{{ + def __init__(self, config, mpasClimatologyTask): # {{{ ''' Construct the analysis task. @@ -38,6 +39,9 @@ def __init__(self, config): # {{{ config : instance of MpasAnalysisConfigParser Contains configuration options + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + Authors ------- Xylar Asay-Davis @@ -49,6 +53,10 @@ def __init__(self, config): # {{{ taskName='meridionalHeatTransport', componentName='ocean', tags=['climatology']) + + self.mpasClimatologyTask = mpasClimatologyTask + self.run_after(mpasClimatologyTask) + # }}} def setup_and_check(self): # {{{ @@ -71,36 +79,17 @@ def setup_and_check(self): # {{{ # self.calendar super(MeridionalHeatTransport, self).setup_and_check() + self.startYear = self.mpasClimatologyTask.startYear + self.startDate = self.mpasClimatologyTask.startDate + self.endYear = self.mpasClimatologyTask.endYear + self.endDate = self.mpasClimatologyTask.endDate + config = self.config - self.check_analysis_enabled( - analysisOptionName='config_am_timeseriesstatsmonthly_enable', - raiseException=True) self.check_analysis_enabled( analysisOptionName='config_am_meridionalheattransport_enable', raiseException=True) - # Get a list of timeSeriesStats output files from the streams file, - # reading only those that are between the start and end dates - # First a list necessary for the MHT climatology - streamName = 'timeSeriesStatsMonthlyOutput' - self.startDate = config.get('climatology', 'startDate') - self.endDate = config.get('climatology', 'endDate') - self.inputFiles = \ - self.historyStreams.readpath(streamName, - startDate=self.startDate, - endDate=self.endDate, - calendar=self.calendar) - - if len(self.inputFiles) == 0: - raise IOError('No files were found in stream {} between {} and ' - '{}.'.format(streamName, self.startDate, - self.endDate)) - - changed, self.startYear, self.endYear, self.startDate, self.endDate = \ - update_climatology_bounds_from_file_names(self.inputFiles, - self.config) - # Later, we will read in depth and MHT latitude points # from mpaso.hist.am.meridionalHeatTransport.*.nc mhtFiles = self.historyStreams.readpath( @@ -133,6 +122,12 @@ def setup_and_check(self): # {{{ mainRunName = self.config.get('runs', 'mainRunName') + variableList = ['timeMonthly_avg_meridionalHeatTransportLat', + 'timeMonthly_avg_meridionalHeatTransportLatZ'] + + self.mpasClimatologyTask.add_variables(variableList=variableList, + seasons=['ANN']) + self.xmlFileNames = [] self.filePrefixes = {} @@ -198,44 +193,13 @@ def run_task(self): # {{{ # Then we will need to add another section for regions with a loop # over number of regions. 
###################################################################### - variableList = ['timeMonthly_avg_meridionalHeatTransportLat', - 'timeMonthly_avg_meridionalHeatTransportLatZ'] - - self.logger.info('\n Compute and plot global meridional heat ' - 'transport') - - outputRoot = build_config_full_path(config, 'output', - 'mpasClimatologySubdirectory') - - outputDirectory = '{}/mht'.format(outputRoot) - self.logger.info('\n List of files for climatologies:\n' - ' {} through\n {}'.format( - os.path.basename(self.inputFiles[0]), - os.path.basename(self.inputFiles[-1]))) + self.logger.info('\n Plotting global meridional heat transport') self.logger.info(' Load data...') - climatologyFileName = get_ncclimo_season_file_name(outputDirectory, - 'mpaso', 'ANN', - self.startYear, - self.endYear) - - if not os.path.exists(climatologyFileName): - make_directories(outputDirectory) - - # Compute annual climatology - compute_climatologies_with_ncclimo( - config=config, - inDirectory=self.historyDirectory, - outDirectory=outputDirectory, - startYear=self.startYear, - endYear=self.endYear, - variableList=variableList, - modelName='mpaso', - seasons=['ANN'], - decemberMode='sdd', - logger=self.logger) + climatologyFileName = self.mpasClimatologyTask.get_file_name( + season='ANN') annualClimatology = xr.open_dataset(climatologyFileName) annualClimatology = annualClimatology.isel(Time=0) diff --git a/mpas_analysis/ocean/streamfunction_moc.py b/mpas_analysis/ocean/streamfunction_moc.py index d4c409d7a..e7fd2f703 100644 --- a/mpas_analysis/ocean/streamfunction_moc.py +++ b/mpas_analysis/ocean/streamfunction_moc.py @@ -17,11 +17,6 @@ from ..shared.timekeeping.utility import get_simulation_start_time, \ days_to_datetime -from ..shared.climatology.climatology \ - import update_climatology_bounds_from_file_names, \ - compute_climatologies_with_ncclimo, \ - get_ncclimo_season_file_name - from ..shared import AnalysisTask from ..shared.time_series import cache_time_series @@ -38,12 +33,18 @@ class StreamfunctionMOC(AnalysisTask): # {{{ * MOC time series (max value at 24.5N), post-processed * MOC time series (max value at 24.5N), from MOC analysis member + Attributes + ---------- + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + Authors ------- Milena Veneziani, Mark Petersen, Phillip Wolfram, Xylar Asay-Davis ''' - def __init__(self, config): # {{{ + def __init__(self, config, mpasClimatologyTask): # {{{ ''' Construct the analysis task. 
@@ -52,6 +53,9 @@ def __init__(self, config): # {{{ config : instance of MpasAnalysisConfigParser Contains configuration options + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + Authors ------- Xylar Asay-Davis @@ -64,6 +68,8 @@ def __init__(self, config): # {{{ componentName='ocean', tags=['streamfunction', 'moc', 'climatology', 'timeSeries']) + self.mpasClimatologyTask = mpasClimatologyTask + self.run_after(mpasClimatologyTask) # }}} def setup_and_check(self): # {{{ @@ -87,11 +93,12 @@ def setup_and_check(self): # {{{ # self.calendar super(StreamfunctionMOC, self).setup_and_check() - config = self.config + self.startYearClimo = self.mpasClimatologyTask.startYear + self.startDateClimo = self.mpasClimatologyTask.startDate + self.endYearClimo = self.mpasClimatologyTask.endYear + self.endDateClimo = self.mpasClimatologyTask.endDate - self.check_analysis_enabled( - analysisOptionName='config_am_timeseriesstatsmonthly_enable', - raiseException=True) + config = self.config self.mocAnalysisMemberEnabled = self.check_analysis_enabled( analysisOptionName='config_am_mocstreamfunction_enable', @@ -101,29 +108,9 @@ def setup_and_check(self): # {{{ # reading only those that are between the start and end dates # First a list necessary for the streamfunctionMOC climatology streamName = 'timeSeriesStatsMonthlyOutput' - self.startDateClimo = config.get('climatology', 'startDate') - self.endDateClimo = config.get('climatology', 'endDate') - self.inputFilesClimo = \ - self.historyStreams.readpath(streamName, - startDate=self.startDateClimo, - endDate=self.endDateClimo, - calendar=self.calendar) - - if len(self.inputFilesClimo) == 0: - raise IOError('No files were found in stream {} between {} and ' - '{}.'.format(streamName, self.startDateClimo, - self.endDateClimo)) self.simulationStartTime = get_simulation_start_time(self.runStreams) - update_climatology_bounds_from_file_names(self.inputFilesClimo, - self.config) - - self.startDateClimo = config.get('climatology', 'startDate') - self.endDateClimo = config.get('climatology', 'endDate') - self.startYearClimo = config.getint('climatology', 'startYear') - self.endYearClimo = config.getint('climatology', 'endYear') - # Then a list necessary for the streamfunctionMOC Atlantic timeseries self.startDateTseries = config.get('timeSeries', 'startDate') self.endDateTseries = config.get('timeSeries', 'endDate') @@ -142,6 +129,12 @@ def setup_and_check(self): # {{{ self.sectionName = 'streamfunctionMOC' + variableList = ['timeMonthly_avg_normalVelocity', + 'timeMonthly_avg_vertVelocityTop'] + + self.mpasClimatologyTask.add_variables(variableList=variableList, + seasons=['ANN']) + self.xmlFileNames = [] self.filePrefixes = {} @@ -181,11 +174,6 @@ def run_task(self): # {{{ self.logger.info("\nPlotting streamfunction of Meridional Overturning " "Circulation (MOC)...") - self.logger.info('\n List of files for climatologies:\n' - ' {} through\n {}'.format( - os.path.basename(self.inputFilesClimo[0]), - os.path.basename(self.inputFilesClimo[-1]))) - self.logger.info('\n List of files for time series:\n' ' {} through\n {}'.format( os.path.basename(self.inputFilesTseries[0]), @@ -203,7 +191,6 @@ def run_task(self): # {{{ # sectionName, dictClimo, # dictTseries) else: - self._compute_velocity_climatologies() self._compute_moc_climo_postprocess() dsMOCTimeSeries = self._compute_moc_time_series_postprocess() @@ -212,9 +199,8 @@ def run_task(self): # {{{ mainRunName = config.get('runs', 'mainRunName') 
movingAveragePoints = config.getint(self.sectionName, 'movingAveragePoints') - movingAveragePointsClimatological = \ - config.getint(self.sectionName, - 'movingAveragePointsClimatological') + movingAveragePointsClimatological = config.getint( + self.sectionName, 'movingAveragePointsClimatological') colorbarLabel = '[Sv]' xLabel = 'latitude [deg]' yLabel = 'depth [m]' @@ -311,40 +297,6 @@ def _load_mesh(self): # {{{ refTopDepth, refLayerThickness # }}} - def _compute_velocity_climatologies(self): # {{{ - '''compute yearly velocity climatologies and cache them''' - - variableList = ['timeMonthly_avg_normalVelocity', - 'timeMonthly_avg_vertVelocityTop'] - - config = self.config - - outputRoot = build_config_full_path(config, 'output', - 'mpasClimatologySubdirectory') - - outputDirectory = '{}/meanVelocity'.format(outputRoot) - - self.velClimoFile = get_ncclimo_season_file_name(outputDirectory, - 'mpaso', 'ANN', - self.startYearClimo, - self.endYearClimo) - - if not os.path.exists(self.velClimoFile): - make_directories(outputDirectory) - - compute_climatologies_with_ncclimo( - config=config, - inDirectory=self.historyDirectory, - outDirectory=outputDirectory, - startYear=self.startYearClimo, - endYear=self.endYearClimo, - variableList=variableList, - modelName='mpaso', - seasons=['ANN'], - decemberMode='sdd', - logger=self.logger) - # }}} - def _compute_moc_climo_postprocess(self): # {{{ '''compute mean MOC streamfunction as a post-process''' @@ -408,7 +360,9 @@ def _compute_moc_climo_postprocess(self): # {{{ if not os.path.exists(outputFileClimo): self.logger.info(' Load data...') - annualClimatology = xr.open_dataset(self.velClimoFile) + climatologyFileName = self.mpasClimatologyTask.get_file_name( + season='ANN') + annualClimatology = xr.open_dataset(climatologyFileName) # rename some variables for convenience annualClimatology = annualClimatology.rename( {'timeMonthly_avg_normalVelocity': 'avgNormalVelocity', diff --git a/mpas_analysis/sea_ice/climatology_map.py b/mpas_analysis/sea_ice/climatology_map.py index 9e09ded86..b2fd5d270 100644 --- a/mpas_analysis/sea_ice/climatology_map.py +++ b/mpas_analysis/sea_ice/climatology_map.py @@ -6,15 +6,11 @@ import xarray as xr -from ..shared.climatology import get_lat_lon_comparison_descriptor, \ - get_remapper, get_mpas_climatology_dir_name, \ - get_observation_climatology_file_names, \ - update_climatology_bounds_from_file_names, \ - remap_and_write_climatology, \ - compute_climatologies_with_ncclimo, \ - get_ncclimo_season_file_name +from ..shared.climatology import get_comparison_descriptor, \ + get_remapper, get_observation_climatology_file_names, \ + remap_and_write_climatology, RemapMpasClimatologySubtask -from ..shared.grid import MpasMeshDescriptor, LatLonGridDescriptor +from ..shared.grid import LatLonGridDescriptor from ..shared.plot.plotting import plot_polar_comparison, \ setup_colormap @@ -31,84 +27,102 @@ class ClimatologyMapSeaIce(SeaIceAnalysisTask): General comparison of 2-d model fields against data. Currently only supports sea ice concentration and sea ice thickness + Attributes + ---------- + remapClimatologySubtask : ``RemapMpasClimatologySubtask`` + The subtask that remaps the climatologies this task will plot. + The ``remapClimatologySubtask`` is needed to determine the file names + of the climatology output. 
+ + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + Authors ------- Xylar Asay-Davis, Milena Veneziani """ - def setup_and_check(self): # {{{ - """ - Perform steps to set up the analysis and check for errors in the setup. + def __init__(self, config, mpasClimatologyTask, taskName, tags): # {{{ + ''' + Construct one analysis subtask for each plot (i.e. each season and + comparison grid) and a subtask for computing climatologies. + + Parameters + ---------- + config : instance of MpasAnalysisConfigParser + Contains configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + taskName : str + The name of the task, typically the same as the subclass name + except starting with lowercase (e.g. 'climatologyMapSST' for class + 'ClimatologyMapSST') + + tags : list of str + Tags used to describe the task (e.g. 'climatology', + 'horizontalMap'). These are used to determine which tasks are + generated (e.g. 'all_transect' or 'no_climatology' in the + 'generate' flags) Authors ------- Xylar Asay-Davis - """ - # call setup_and_check from the base class (SeaIceAnalysisTask), - # which will perform some common setup - super(ClimatologyMapSeaIce, self).setup_and_check() - self.check_analysis_enabled( - analysisOptionName='config_am_timeseriesstatsmonthly_enable', - raiseException=True) - - # get a list of timeSeriesStatsMonthly output files from the streams - # file, reading only those that are between the start and end dates - streamName = 'timeSeriesStatsMonthlyOutput' - self.startDate = self.config.get('climatology', 'startDate') - self.endDate = self.config.get('climatology', 'endDate') - self.inputFiles = \ - self.historyStreams.readpath(streamName, - startDate=self.startDate, - endDate=self.endDate, - calendar=self.calendar) - - if len(self.inputFiles) == 0: - raise IOError('No files were found in stream {} between {} and ' - '{}.'.format(streamName, self.startDate, - self.endDate)) - - changed, self.startYear, self.endYear, self.startDate, self.endDate = \ - update_climatology_bounds_from_file_names(self.inputFiles, - self.config) + ''' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapSeaIce, self).__init__(config=config, + taskName=taskName, + componentName='seaIce', + tags=tags) - mainRunName = self.config.get('runs', 'mainRunName') - config = self.config - hemisphere = self.hemisphere + self.mpasClimatologyTask = mpasClimatologyTask - mpasDescriptor = MpasMeshDescriptor( - self.restartFileName, - meshName=self.config.get('input', 'mpasMeshName')) + sectionName = self.taskName - comparisonDescriptor = get_lat_lon_comparison_descriptor(self.config) + # read in what seasons we want to plot + seasons = config.getExpression(sectionName, 'seasons') - self.mpasRemapper = get_remapper( - config=self.config, sourceDescriptor=mpasDescriptor, - comparisonDescriptor=comparisonDescriptor, - mappingFilePrefix='map', - method=self.config.get('climatology', 'mpasInterpolationMethod'), - logger=self.logger) + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) - info = self.obsAndPlotInfo[0] - season = info['season'] + # the variable self.mpasFieldName will be added to mpasClimatologyTask + # along with the seasons. 
+ self.remapClimatologySubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=self.mpasClimatologyTask, + parentTask=self, + climatologyName=self.fieldName, + variableList=[self.mpasFieldName], + seasons=seasons, + iselValues=self.iselValues) - fieldName = '{}{}'.format(self.mpasFieldName, hemisphere) + # }}} - obsFileName = info['obsFileName'] + def setup_and_check(self): # {{{ + """ + Perform steps to set up the analysis and check for errors in the setup. - obsDescriptor = LatLonGridDescriptor.read(fileName=obsFileName, - latVarName='t_lat', - lonVarName='t_lon') + Authors + ------- + Xylar Asay-Davis + """ + # first, call setup_and_check from the base class (SeaIceAnalysisTask), + # which will perform some common setup + super(ClimatologyMapSeaIce, self).setup_and_check() - fieldName = '{}{}'.format(self.obsFieldName, hemisphere) - self.obsRemapper = get_remapper( - config=config, - sourceDescriptor=obsDescriptor, - comparisonDescriptor=comparisonDescriptor, - mappingFilePrefix='map_obs_{}'.format(fieldName), - method=config.get('seaIceObservations', - 'interpolationMethod'), - logger=self.logger) + self.startYear = self.mpasClimatologyTask.startYear + self.startDate = self.mpasClimatologyTask.startDate + self.endYear = self.mpasClimatologyTask.endYear + self.endDate = self.mpasClimatologyTask.endDate + + mainRunName = self.config.get('runs', 'mainRunName') + + # we set up the remapper here because ESMF_RegridWeightGen seems to + # have trouble if it runs in another process (or in several at once) + self._setup_obs_remapper() self.xmlFileNames = [] @@ -145,32 +159,11 @@ def run_task(self): # {{{ self.logger.info("\nPlotting 2-d maps of {} climatologies...".format( self.fieldNameInTitle)) - self.logger.info('\n Reading files:\n' - ' {} through\n {}'.format( - os.path.basename(self.inputFiles[0]), - os.path.basename(self.inputFiles[-1]))) - - self._compute_seasonal_climatologies() - - self._compute_and_plot() # }}} - - def _compute_and_plot(self): # {{{ - ''' - computes seasonal climatologies and plots model results, observations - and biases. - - Authors - ------- - Xylar Asay-Davis, Milena Veneziani - ''' - - self.logger.info(' Make ice concentration plots...') - config = self.config mainRunName = config.get('runs', 'mainRunName') - startYear = config.getint('climatology', 'startYear') - endYear = config.getint('climatology', 'endYear') + startYear = self.startYear + endYear = self.endYear hemisphere = self.hemisphere sectionName = self.sectionName @@ -195,10 +188,8 @@ def _compute_and_plot(self): # {{{ fieldName = '{}{}'.format(self.mpasFieldName, hemisphere) - remappedFileName = \ - get_ncclimo_season_file_name(self.remappedDirectory, - 'mpascice', season, - self.startYear, self.endYear) + remappedFileName = self.remapClimatologySubtask.get_file_name( + season=season, stage='remapped', comparisonGridName='latlon') remappedClimatology = xr.open_dataset(remappedFileName) modelOutput = remappedClimatology[self.mpasFieldName].values @@ -298,98 +289,40 @@ def _compute_and_plot(self): # {{{ imageCaption=imageCaption) # }}} - def _compute_seasonal_climatologies(self): # {{{ + def _setup_obs_remapper(self): # {{{ + """ + Set up the remapper for remapping from the observation grid to the + comparison grid.
+ Authors + ------- + Xylar Asay-Davis + """ config = self.config + hemisphere = self.hemisphere - mpasMeshName = self.mpasRemapper.sourceDescriptor.meshName - comparisonGridName = self.mpasRemapper.destinationDescriptor.meshName - - startYear = config.getint('climatology', 'startYear') - endYear = config.getint('climatology', 'endYear') - - fieldName = '{}{}'.format(self.fieldName, self.hemisphere) - - self.climatologyDirectory = \ - get_mpas_climatology_dir_name( - config=config, - fieldName=fieldName, - mpasMeshName=mpasMeshName) - - (self.maskedClimatologyDirectory, self.remappedDirectory) = \ - get_mpas_climatology_dir_name( - config=config, - fieldName='{}_masked'.format(fieldName), - mpasMeshName=mpasMeshName, - comparisonGridName=comparisonGridName) - - modelName = 'mpascice' - - allExist = True - for season in self.seasons: - climatologyFileName = get_ncclimo_season_file_name( - self.climatologyDirectory, modelName, season, - self.startYear, self.endYear) - if not os.path.exists(climatologyFileName): - allExist = False - break - - if not allExist: - - compute_climatologies_with_ncclimo( - config=config, - inDirectory=self.historyDirectory, - outDirectory=self.climatologyDirectory, - startYear=startYear, - endYear=endYear, - variableList=['timeMonthly_avg_iceAreaCell', - 'timeMonthly_avg_iceVolumeCell'], - modelName=modelName, - seasons=self.seasons, - decemberMode='sdd', - logger=self.logger) - - self._remap_seasonal_climatology() + info = self.obsAndPlotInfo[0] - # }}} + fieldName = '{}{}'.format(self.mpasFieldName, hemisphere) - def _remap_seasonal_climatology(self): # {{{ + obsFileName = info['obsFileName'] - modelName = 'mpascice' + obsDescriptor = LatLonGridDescriptor.read(fileName=obsFileName, + latVarName='t_lat', + lonVarName='t_lon') - for season in self.seasons: + comparisonDescriptor = get_comparison_descriptor( + config=config, comparisonGridName='latlon') - # interpolate the model results - climatologyFileName = \ - get_ncclimo_season_file_name(self.climatologyDirectory, - modelName, season, - self.startYear, self.endYear) - - maskedClimatologyFileName = \ - get_ncclimo_season_file_name(self.maskedClimatologyDirectory, - modelName, season, - self.startYear, self.endYear) - - remappedFileName = \ - get_ncclimo_season_file_name(self.remappedDirectory, - modelName, season, - self.startYear, self.endYear) - - if not os.path.exists(maskedClimatologyFileName): - # slice the data set and set _FillValue (happens automatically) - climatology = xr.open_dataset(climatologyFileName) - iselValues = {'Time': 0} - # select only Time=0 - climatology = climatology.isel(**iselValues) - - write_netcdf(climatology, maskedClimatologyFileName) - - if not os.path.exists(remappedFileName): - self.mpasRemapper.remap_file( - inFileName=maskedClimatologyFileName, - outFileName=remappedFileName, - overwrite=True, - logger=self.logger) + fieldName = '{}{}'.format(self.obsFieldName, hemisphere) + self.obsRemapper = get_remapper( + config=config, + sourceDescriptor=obsDescriptor, + comparisonDescriptor=comparisonDescriptor, + mappingFilePrefix='map_obs_{}'.format(fieldName), + method=config.get('seaIceObservations', + 'interpolationMethod'), + logger=self.logger) # }}} # }}} @@ -403,7 +336,7 @@ class ClimatologyMapSeaIceConc(ClimatologyMapSeaIce): # {{{ ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ - def __init__(self, config, hemisphere): + def __init__(self, config, mpasClimatologyTask, hemisphere): # {{{ """ Construct the analysis task. 
@@ -413,6 +346,9 @@ def __init__(self, config, hemisphere): config : instance of MpasAnalysisConfigParser Contains configuration options + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + hemisphere : {'NH', 'SH'} The hemisphere to plot @@ -437,10 +373,9 @@ def __init__(self, config, hemisphere): tags = ['climatology', 'horizontalMap', self.fieldName] # call the constructor from the base class (AnalysisTask) - super(ClimatologyMapSeaIceConc, self).__init__(config=config, - taskName=taskName, - componentName='seaIce', - tags=tags) + super(ClimatologyMapSeaIceConc, self).__init__( + config=config, mpasClimatologyTask=mpasClimatologyTask, + taskName=taskName, tags=tags) # }}} @@ -534,7 +469,7 @@ class ClimatologyMapSeaIceThick(ClimatologyMapSeaIce): # {{{ ------- Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani """ - def __init__(self, config, hemisphere): + def __init__(self, config, mpasClimatologyTask, hemisphere): # {{{ """ Construct the analysis task. @@ -544,6 +479,9 @@ def __init__(self, config, hemisphere): config : instance of MpasAnalysisConfigParser Contains configuration options + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + hemisphere : {'NH', 'SH'} The hemisphere to plot @@ -566,10 +504,9 @@ def __init__(self, config, hemisphere): tags = ['climatology', 'horizontalMap', self.fieldName] # call the constructor from the base class (AnalysisTask) - super(ClimatologyMapSeaIceThick, self).__init__(config=config, - taskName=taskName, - componentName='seaIce', - tags=tags) + super(ClimatologyMapSeaIceThick, self).__init__( + config=config, mpasClimatologyTask=mpasClimatologyTask, + taskName=taskName, tags=tags) # }}} diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index a2cb5e827..0c91fff65 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -65,6 +65,16 @@ class AnalysisTask(Process): # {{{ calendar : {'gregorian', 'gregoraian_noleap'} The calendar used in the MPAS run + runAfterTasks : list of ``AnalysisTasks`` + Tasks that must be complete before this task can run + + subtasks : list of ``AnalysisTasks`` + Subtasks of this task, in the order in which they were added + + xmlFileNames : list of strings + The XML file associated with each plot produced by this analysis, empty + if no plots were produced + logger : ``logging.Logger`` A logger for output during the run phase of an analysis task @@ -82,7 +92,8 @@ class AnalysisTask(Process): # {{{ SUCCESS = 4 FAIL = 5 - def __init__(self, config, taskName, componentName, tags=[]): # {{{ + def __init__(self, config, taskName, componentName, tags=[], + subtaskName=None): # {{{ ''' Construct the analysis task. @@ -109,24 +120,36 @@ def __init__(self, config, taskName, componentName, tags=[]): # {{{ which tasks are generated (e.g.
'all_transect' or 'no_climatology' in the 'generate' flags) + subtaskName : str, optional + If this is a subtask of ``taskName``, the name of the subtask + Authors ------- Xylar Asay-Davis ''' - # This will include a subtask name as well in the future - self.fullTaskName = taskName + if subtaskName is None: + self.fullTaskName = taskName + self.printTaskName = taskName + else: + self.fullTaskName = '{}_{}'.format(taskName, subtaskName) + self.printTaskName = '{}: {}'.format(taskName, subtaskName) # call the constructor from the base class (Process) super(AnalysisTask, self).__init__(name=self.fullTaskName) self.config = config self.taskName = taskName + self.subtaskName = subtaskName self.componentName = componentName self.tags = tags + self.subtasks = [] self.logger = None + self.runAfterTasks = [] + self.xmlFileNames = [] # non-public attributes related to multiprocessing and logging self.daemon = True + self._setupStatus = None self._runStatus = Value('i', AnalysisTask.UNSET) self._stackTrace = None self._logFileName = None @@ -209,6 +232,48 @@ def run_task(self): # {{{ ''' return # }}} + def run_after(self, task): # {{{ + ''' + Only run this task after the given task has completed. This allows a + task to be constructed of multiple subtasks, some of which may block + later tasks, while allowing some subtasks to run in parallel. It also + allows for tasks to depend on other tasks (e.g. for computing + climatologies or extracting time series for many variables at once). + + Parameters + ---------- + task : ``AnalysisTask`` + The task that should finish before this one begins + + Authors + ------- + Xylar Asay-Davis + ''' + + self.runAfterTasks.append(task) + # }}} + + def add_subtask(self, subtask): # {{{ + ''' + Add a subtask to this task. This task always runs after the subtask + has finished. However, this task gets set up *before* the subtask, + so the setup of the subtask can depend on fields defined during the + setup of this task (the parent). + + Parameters + ---------- + subtask : ``AnalysisTask`` + The subtask to run as part of this task + + Authors + ------- + Xylar Asay-Davis + ''' + + if subtask not in self.subtasks: + self.subtasks.append(subtask) + # }}} + def run(self, writeLogFile=True): # {{{ ''' Sets up logging and then runs the analysis task.
@@ -245,7 +310,6 @@ def run(self, writeLogFile=True): # {{{ sys.stdout = StreamToLogger(self.logger, logging.INFO) sys.stderr = StreamToLogger(self.logger, logging.ERROR) - self._runStatus.value = AnalysisTask.RUNNING startTime = time.time() try: self.run_task() @@ -255,7 +319,7 @@ def run(self, writeLogFile=True): # {{{ raise e self._stackTrace = traceback.format_exc() self.logger.error("analysis task {} failed during run \n" - "{}".format(self.taskName, self._stackTrace)) + "{}".format(self.fullTaskName, self._stackTrace)) self._runStatus.value = AnalysisTask.FAIL runDuration = time.time() - startTime @@ -493,5 +557,4 @@ def flush(self): # }}} - # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/shared/climatology/__init__.py b/mpas_analysis/shared/climatology/__init__.py index b9ab84322..a6fe39aa3 100644 --- a/mpas_analysis/shared/climatology/__init__.py +++ b/mpas_analysis/shared/climatology/__init__.py @@ -1,6 +1,9 @@ -from .climatology import get_lat_lon_comparison_descriptor, get_remapper, \ - get_mpas_climatology_dir_name, get_observation_climatology_file_names, \ - compute_monthly_climatology, compute_climatology, cache_climatologies, \ - add_years_months_days_in_month, remap_and_write_climatology, \ - compute_climatologies_with_ncclimo, \ - update_climatology_bounds_from_file_names, get_ncclimo_season_file_name +from .climatology import get_remapper, \ + get_observation_climatology_file_names, \ + compute_monthly_climatology, compute_climatology, \ + add_years_months_days_in_month, remap_and_write_climatology + +from .mpas_climatology_task import MpasClimatologyTask +from .remap_mpas_climatology_subtask import RemapMpasClimatologySubtask +from .comparison_descriptors import get_comparison_descriptor, \ + get_antarctic_stereographic_projection diff --git a/mpas_analysis/shared/climatology/climatology.py b/mpas_analysis/shared/climatology/climatology.py index 75e8343e8..900095255 100644 --- a/mpas_analysis/shared/climatology/climatology.py +++ b/mpas_analysis/shared/climatology/climatology.py @@ -9,9 +9,6 @@ import xarray as xr import os import numpy -from distutils.spawn import find_executable -import sys -import subprocess from ..constants import constants @@ -25,44 +22,6 @@ from ..grid import LatLonGridDescriptor, ProjectionGridDescriptor -def get_lat_lon_comparison_descriptor(config): # {{{ - """ - Get a descriptor of the lat/lon comparison grid, used for remapping and - determining the grid name - - Parameters - ---------- - config : instance of ``MpasAnalysisConfigParser`` - Contains configuration options - - Returns - ------- - descriptor : ``LatLonGridDescriptor`` object - A descriptor of the lat/lon grid - - Authors - ------- - Xylar Asay-Davis - """ - climSection = 'climatology' - - comparisonLatRes = config.getWithDefault(climSection, - 'comparisonLatResolution', - constants.dLatitude) - comparisonLonRes = config.getWithDefault(climSection, - 'comparisonLatResolution', - constants.dLongitude) - - nLat = int((constants.latmax-constants.latmin)/comparisonLatRes)+1 - nLon = int((constants.lonmax-constants.lonmin)/comparisonLonRes)+1 - lat = numpy.linspace(constants.latmin, constants.latmax, nLat) - lon = numpy.linspace(constants.lonmin, constants.lonmax, nLon) - - descriptor = LatLonGridDescriptor.create(lat, lon, units='degrees') - - return descriptor # }}} - - def get_remapper(config, sourceDescriptor, comparisonDescriptor, mappingFilePrefix, method, logger=None): # {{{ """ @@ -143,250 +102,6 @@ def get_remapper(config, sourceDescriptor, 
comparisonDescriptor, return remapper # }}} -def get_mpas_climatology_dir_name(config, fieldName, mpasMeshName, - comparisonGridName=None): # {{{ - """ - Given config options, the name of a field and a string identifying the - months in a seasonal climatology, returns the full path for MPAS - climatology files before and after remapping. - - Parameters - ---------- - config : instance of MpasAnalysisConfigParser - Contains configuration options - - fieldName : str - Name of the field being mapped, used as a prefix for the climatology - file name. - - mpasMeshName : str - The name of the MPAS mesh - - comparisonGridName : str, optional - The name of the comparison grid (if any) - - Returns - ------- - climatologyFileName : str - The absolute path to a file where the climatology should be stored - before remapping. - - climatologyPrefix : str - The prfix including absolute path for climatology cache files before - remapping. - - remappedFileName : str - The absolute path to a file where the climatology should be stored - after remapping if ``comparisonGridName`` is supplied - - Authors - ------- - Xylar Asay-Davis - """ - - climatologyBaseDirectory = build_config_full_path( - config, 'output', 'mpasClimatologySubdirectory') - - climatologyDirectory = '{}/{}_{}'.format(climatologyBaseDirectory, - fieldName, - mpasMeshName) - - make_directories(climatologyDirectory) - - if comparisonGridName is None: - return climatologyDirectory - else: - remappedBaseDirectory = build_config_full_path( - config, 'output', 'mpasRemappedClimSubdirectory') - - remappedDirectory = '{}/{}_{}_to_{}'.format( - remappedBaseDirectory, fieldName, mpasMeshName, - comparisonGridName) - - make_directories(remappedDirectory) - - return (climatologyDirectory, remappedDirectory) - - # }}} - - -def get_ncclimo_season_file_name(directory, modelName, seasonName, startYear, - endYear): # {{{ - """ - Given config options, the name of a field and a string identifying the - months in a seasonal climatology, returns the full path for MPAS - climatology files before and after regridding. - - Parameters - ---------- - directory : str - the directory containing climatologies generated by ncclimo - - modelName : ['mpaso', 'mpascice'] - The name of the component for which the climatology is to be computed - - seasonName : str - One of the season names in ``constants.monthDictionary`` - - startYear, endYear : int - The start and end years of the climatology - - Returns - ------- - fileName : str - The path to the climatology file for the specified season. - - Authors - ------- - Xylar Asay-Davis - """ - - monthValues = sorted(constants.monthDictionary[seasonName]) - startMonth = monthValues[0] - endMonth = monthValues[-1] - - suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format(startYear, startMonth, - endYear, endMonth) - - if seasonName in constants.abrevMonthNames: - seasonName = '{:02d}'.format(monthValues[0]) - fileName = '{}/{}_{}_{}.nc'.format(directory, modelName, seasonName, - suffix) - return fileName # }}} - - -def compute_climatologies_with_ncclimo(config, inDirectory, outDirectory, - startYear, endYear, - variableList, modelName, - seasons='none', - decemberMode='sdd', - remapper=None, - remappedDirectory=None, - logger=None): # {{{ - ''' - Uses ncclimo to compute monthly, seasonal (DJF, MAM, JJA, SON) and annual - climatologies. 
- - Parameters - ---------- - config : instance of MpasAnalysisConfigParser - Contains configuration options - - inDirectory : str - The run directory containing timeSeriesStatsMonthly output - - outDirectory : str - The output directory where climatologies will be written - - - startYear, endYear : int - The start and end years of the climatology - - variableList : list of str - A list of variables to include in the climatology - - modeName : {'mpaso', 'mpascice'} - The name of the component for which the climatology is to be computed - - seasons : list of str - Seasons (keys in ``constants.monthDictionary`` other than individual - months, which will be removed automatically) over which monthly - climatologies should be aggregated. - - decemberMode : {'scd', 'sdd'}, optional - Whether years start in December (scd - seasonally continuous December) - or January (sdd - seasonally discontinuous December). If the former, - the data set begins with December of the year before startYear and ends - with November of endYear. Otherwise (the default), goes from January - of startYear to December of endYear. - - remapper : ``shared.intrpolation.Remapper`` object, optional - If present, a remapper that defines the source and desitnation grids - for remapping the climatologies. - - remappedDirectory : str, optional - If present, the path where remapped climatologies should be written. - By default, remapped files are stored in the same directory as the - climatologies on the source grid. Has no effect if ``remapper`` is - ``None``. - - logger : ``logging.Logger``, optional - A logger to which ncclimo output should be redirected - - Raises - ------ - OSError - If ``ncclimo`` is not in the system path. - - Authors - ------- - Xylar Asay-Davis - ''' - - if find_executable('ncclimo') is None: - raise OSError('ncclimo not found. 
Make sure the latest nco ' - 'package is installed: \n' - 'conda install nco\n' - 'Note: this presumes use of the conda-forge ' - 'channel.') - - parallelMode = config.get('execute', 'ncclimoParallelMode') - - # make sure to remove individual months from seasons - seasons = [season for season in seasons if season not in - constants.abrevMonthNames] - - args = ['ncclimo', - '--clm_md=mth', - '-a', decemberMode, - '-m', modelName, - '-p', parallelMode, - '-v', ','.join(variableList), - '--seasons={}'.format(','.join(seasons)), - '-s', '{:04d}'.format(startYear), - '-e', '{:04d}'.format(endYear), - '-i', inDirectory, - '-o', outDirectory] - - if remapper is not None: - args.extend(['-r', remapper.mappingFileName]) - if remappedDirectory is not None: - args.extend(['-O', remappedDirectory]) - - # set an environment variable to make sure we're not using czender's - # local version of NCO instead of one we have intentionally loaded - env = os.environ.copy() - env['NCO_PATH_OVERRIDE'] = 'No' - - if logger is None: - print 'running: {}'.format(' '.join(args)) - # make sure any output is flushed before we add output from the - # subprocess - sys.stdout.flush() - sys.stderr.flush() - - subprocess.check_call(args, env=env) - else: - logger.info('running: {}'.format(' '.join(args))) - for handler in logger.handlers: - handler.flush() - - process = subprocess.Popen(args, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, env=env) - stdout, stderr = process.communicate() - - if stdout: - logger.info(stdout) - if stderr: - for line in stderr.split('\n'): - logger.error(line) - - if process.returncode != 0: - raise subprocess.CalledProcessError(process.returncode, - ' '.join(args)) - # }}} - - def get_observation_climatology_file_names(config, fieldName, monthNames, componentName, remapper): # {{{ """ @@ -560,156 +275,6 @@ def compute_climatology(ds, monthValues, calendar=None, return climatology # }}} -def cache_climatologies(ds, monthValues, config, cachePrefix, calendar, - printProgress=False): # {{{ - ''' - Cache NetCDF files for each year of an annual climatology, and then use - the cached files to compute a climatology for the full range of years. - The start and end years of the climatology are taken from ``config``, and - are updated in ``config`` if the data set ``ds`` doesn't contain this - full range. - - Note: only works with climatologies where the mask (locations of ``NaN`` - values) doesn't vary with time. - - Parameters - ---------- - ds : ``xarray.Dataset`` or ``xarray.DataArray`` object - A data set with a ``Time`` coordinate expressed as days since - 0001-01-01 - - monthValues : int or array-like of ints - A single month or an array of months to be averaged together - - config : instance of MpasAnalysisConfigParser - Contains configuration options - - cachePrefix : str - The file prefix (including path) to which the year (or years) will be - appended as cache files are stored - - calendar : ``{'gregorian', 'gregorian_noleap'}`` - The name of one of the calendars supported by MPAS cores, used to - determine ``year`` and ``month`` from ``Time`` coordinate - - printProgress: bool, optional - Whether progress messages should be printed as the climatology is - computed - - Returns - ------- - climatology : object of same type as ``ds`` - A data set without the ``'Time'`` coordinate containing the mean - of ds over all months in monthValues, weighted by the number of days - in each month. 
- - Authors - ------- - Xylar Asay-Davis - ''' - startYearClimo = config.getint('climatology', 'startYear') - endYearClimo = config.getint('climatology', 'endYear') - yearsPerCacheFile = config.getint('climatology', 'yearsPerCacheFile') - - if printProgress: - print ' Computing and caching climatologies covering {}-year ' \ - 'spans...'.format(yearsPerCacheFile) - - ds = add_years_months_days_in_month(ds, calendar) - - cacheInfo, cacheIndices = _setup_climatology_caching(ds, startYearClimo, - endYearClimo, - yearsPerCacheFile, - cachePrefix, - monthValues) - - ds = ds.copy() - ds.coords['cacheIndices'] = ('Time', cacheIndices) - - # compute and store each cache file with interval yearsPerCacheFile - _cache_individual_climatologies(ds, cacheInfo, printProgress, - yearsPerCacheFile, monthValues, - calendar) - - # compute the aggregate climatology - climatology = _cache_aggregated_climatology(startYearClimo, endYearClimo, - cachePrefix, printProgress, - monthValues, cacheInfo) - - return climatology # }}} - - -def update_climatology_bounds_from_file_names(inputFiles, config): # {{{ - """ - - Update the start and end years and dates for climatologies based on the - years actually available in the list of files. - - Parameters - ---------- - inputFiles : list of str - A list of file names ending with dates (before the '.nc' extension) - - config : instance of MpasAnalysisConfigParser - Contains configuration options - - Returns - ------- - changed : bool - Whether the start and end years were changed - - startYear, endYear : int - The start and end years of the data set - - Authors - ------- - Xylar Asay-Davis - """ - requestedStartYear = config.getint('climatology', 'startYear') - requestedEndYear = config.getint('climatology', 'endYear') - - dates = sorted([fileName[-13:-6] for fileName in inputFiles]) - years = [int(date[0:4]) for date in dates] - months = [int(date[5:7]) for date in dates] - - # search for the start of the first full year - firstIndex = 0 - while(firstIndex < len(years) and months[firstIndex] != 1): - firstIndex += 1 - startYear = years[firstIndex] - - # search for the end of the last full year - lastIndex = len(years)-1 - while(lastIndex >= 0 and months[lastIndex] != 12): - lastIndex -= 1 - endYear = years[lastIndex] - - changed = False - if startYear != requestedStartYear or endYear != requestedEndYear: - print "Warning:climatology start and/or end year different from " \ - "requested\n" \ - "requested: {:04d}-{:04d}\n" \ - "actual: {:04d}-{:04d}\n".format(requestedStartYear, - requestedEndYear, - startYear, - endYear) - - config.set('climatology', 'startYear', str(startYear)) - config.set('climatology', 'endYear', str(endYear)) - - startDate = '{:04d}-01-01_00:00:00'.format(startYear) - config.set('climatology', 'startDate', startDate) - endDate = '{:04d}-12-31_23:59:59'.format(endYear) - config.set('climatology', 'endDate', endDate) - changed = True - - else: - startDate = config.get('climatology', 'startDate') - endDate = config.get('climatology', 'endDate') - - return changed, startYear, endYear, startDate, endDate # }}} - - def add_years_months_days_in_month(ds, calendar=None): # {{{ ''' Add ``year``, ``month`` and ``daysInMonth`` as data arrays in ``ds``. 
diff --git a/mpas_analysis/shared/climatology/comparison_descriptors.py b/mpas_analysis/shared/climatology/comparison_descriptors.py
new file mode 100644
index 000000000..a1e993088
--- /dev/null
+++ b/mpas_analysis/shared/climatology/comparison_descriptors.py
@@ -0,0 +1,144 @@
+"""
+Functions for creating descriptors of the comparison grids used for remapping
+
+Authors
+-------
+Xylar Asay-Davis
+"""
+
+import numpy
+import pyproj
+
+from ..constants import constants
+
+from ..grid import LatLonGridDescriptor, ProjectionGridDescriptor
+
+
+def get_comparison_descriptor(config, comparisonGridName): # {{{
+    """
+    Get the comparison grid descriptor from the comparisonGridName.
+
+    Parameters
+    ----------
+    config : MpasAnalysisConfigParser object
+        Contains configuration options
+
+    comparisonGridName : {'latlon', 'antarctic'}
+        The name of the comparison grid to use for remapping.
+
+    Raises
+    ------
+    ValueError
+        If comparisonGridName does not describe a known comparison grid
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+    if comparisonGridName == 'latlon':
+        comparisonDescriptor = \
+            _get_lat_lon_comparison_descriptor(config)
+    elif comparisonGridName == 'antarctic':
+        comparisonDescriptor = \
+            _get_antarctic_stereographic_comparison_descriptor(config)
+    else:
+        raise ValueError('Unknown comparison grid type {}'.format(
+            comparisonGridName))
+    return comparisonDescriptor # }}}
+
+
+def get_antarctic_stereographic_projection(): # {{{
+    """
+    Get a projection for an Antarctic stereographic comparison grid
+
+    Returns
+    -------
+    projection : ``pyproj.Proj`` object
+        The projection
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+    projection = pyproj.Proj('+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 '
+                             '+k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=WGS84')
+
+    return projection # }}}
+
+
+def _get_lat_lon_comparison_descriptor(config): # {{{
+    """
+    Get a descriptor of the lat/lon comparison grid, used for remapping and
+    determining the grid name
+
+    Parameters
+    ----------
+    config : instance of ``MpasAnalysisConfigParser``
+        Contains configuration options
+
+    Returns
+    -------
+    descriptor : ``LatLonGridDescriptor`` object
+        A descriptor of the lat/lon grid
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+    climSection = 'climatology'
+
+    comparisonLatRes = config.getWithDefault(climSection,
+                                             'comparisonLatResolution',
+                                             constants.dLatitude)
+    comparisonLonRes = config.getWithDefault(climSection,
+                                             'comparisonLonResolution',
+                                             constants.dLongitude)
+
+    nLat = int((constants.latmax-constants.latmin)/comparisonLatRes)+1
+    nLon = int((constants.lonmax-constants.lonmin)/comparisonLonRes)+1
+    lat = numpy.linspace(constants.latmin, constants.latmax, nLat)
+    lon = numpy.linspace(constants.lonmin, constants.lonmax, nLon)
+
+    descriptor = LatLonGridDescriptor.create(lat, lon, units='degrees')
+
+    return descriptor # }}}
+
+
+def _get_antarctic_stereographic_comparison_descriptor(config): # {{{
+    """
+    Get a descriptor of an Antarctic stereographic comparison grid, used for
+    remapping and determining the grid name
+
+    Parameters
+    ----------
+    config : instance of ``MpasAnalysisConfigParser``
+        Contains configuration options
+
+    Returns
+    -------
+    descriptor : ``ProjectionGridDescriptor`` object
+        A descriptor of the Antarctic comparison grid
+
+    Authors
+    -------
+    Xylar Asay-Davis
+    """
+    climSection = 'climatology'
+
+    comparisonStereoWidth = config.getfloat(climSection,
+                                            'comparisonAntarcticStereoWidth')
+    comparisonStereoResolution = config.getfloat(
+        climSection, 'comparisonAntarcticStereoResolution')
+
projection = get_antarctic_stereographic_projection() + + xMax = 0.5*comparisonStereoWidth*1e3 + nx = int(comparisonStereoWidth/comparisonStereoResolution)+1 + x = numpy.linspace(-xMax, xMax, nx) + + meshName = '{}x{}km_{}km_Antarctic_stereo'.format( + comparisonStereoWidth, comparisonStereoWidth, + comparisonStereoResolution) + descriptor = ProjectionGridDescriptor.create(projection, x, x, meshName) + + return descriptor # }}} diff --git a/mpas_analysis/shared/climatology/mpas_climatology_task.py b/mpas_analysis/shared/climatology/mpas_climatology_task.py new file mode 100644 index 000000000..d08441a65 --- /dev/null +++ b/mpas_analysis/shared/climatology/mpas_climatology_task.py @@ -0,0 +1,441 @@ +import xarray +import os +import warnings +import subprocess +from distutils.spawn import find_executable + +from ..analysis_task import AnalysisTask + +from ..constants import constants + +from ..io.utility import build_config_full_path, make_directories + + +class MpasClimatologyTask(AnalysisTask): # {{{ + ''' + An analysis tasks for computing climatologies from output from the + ``timeSeriesStatsMonthly`` analysis member. + + Attributes + ---------- + + variableList : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the climatologies + + seasons : list of str + A list of seasons (keys in ``shared.constants.monthDictionary``) + over which the climatology should be computed or ['none'] if only + monthly climatologies are needed. + + inputFiles : list of str + A list of input files used to compute the climatologies. + + ncclimoModel : {'mpaso', 'mpascice'} + The name of the component expected by ``ncclimo`` + + startDate, endDate : str + The start and end dates of the climatology as strings + + startYear, endYear : int + The start and end years of the climatology + + Authors + ------- + Xylar Asay-Davis + ''' + + def __init__(self, config, componentName): # {{{ + ''' + Construct the analysis task and adds it as a subtask of the + ``parentTask``. + + Parameters + ---------- + config : ``MpasAnalysisConfigParser`` + Contains configuration options + + componentName : {'ocean', 'seaIce'} + The name of the component (same as the folder where the task + resides) + + Authors + ------- + Xylar Asay-Davis + ''' + self.variableList = [] + self.seasons = [] + + tags = ['climatology'] + + if componentName == 'ocean': + self.ncclimoModel = 'mpaso' + elif componentName == 'seaIce': + self.ncclimoModel = 'mpascice' + else: + raise ValueError('component {} is not supported by ncclimo.\n' + 'Check with Charlie Zender and Xylar Asay-Davis\n' + 'about getting it added'.format(componentName)) + + suffix = componentName[0].upper() + componentName[1:] + taskName = 'mpasClimatology{}'.format(suffix) + + # call the constructor from the base class (AnalysisTask) + super(MpasClimatologyTask, self).__init__( + config=config, + taskName=taskName, + componentName=componentName, + tags=tags) + + # }}} + + def add_variables(self, variableList, seasons=None): # {{{ + ''' + Add one or more variables and optionally one or more seasons for which + to compute climatologies. + + Parameters + ---------- + variableList : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the climatologies + + seasons : list of str, optional + A list of seasons (keys in ``shared.constants.monthDictionary``) + to be computed or ['none'] (not ``None``) if only monthly + climatologies are needed. 
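As a usage sketch for ``add_variables`` (mirroring the new unit test later in this change; the config file name, variable names and seasons come from the test data and are illustrative only):

    from mpas_analysis.configuration import MpasAnalysisConfigParser
    from mpas_analysis.shared.climatology import MpasClimatologyTask

    config = MpasAnalysisConfigParser()
    config.read('config.QU240')

    # a single climatology task per component; other analysis tasks can then
    # register the variables and seasons they will need
    mpasClimatologyTask = MpasClimatologyTask(config=config,
                                              componentName='ocean')
    mpasClimatologyTask.setup_and_check()
    mpasClimatologyTask.add_variables(
        variableList=['timeMonthly_avg_ssh', 'timeMonthly_avg_tThreshMLD'],
        seasons=['JFM', 'JJA', 'ANN'])
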
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        for variable in variableList:
+            if variable not in self.variableList:
+                self.variableList.append(variable)
+
+        if seasons is not None:
+            for season in seasons:
+                if season not in self.seasons:
+                    self.seasons.append(season)
+
+        self._setup_file_names()
+
+        # }}}
+
+    def setup_and_check(self): # {{{
+        '''
+        Perform steps to set up the analysis and check for errors in the setup.
+
+        Raises
+        ------
+        IOError :
+            If a restart file is not available from which to read mesh
+            information or if no history files are available from which to
+            compute the climatology in the desired time range.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        # first, call setup_and_check from the base class (AnalysisTask),
+        # which will perform some common setup, including storing:
+        # self.runDirectory , self.historyDirectory, self.plotsDirectory,
+        # self.namelist, self.runStreams, self.historyStreams,
+        # self.calendar
+        super(MpasClimatologyTask, self).setup_and_check()
+
+        self.check_analysis_enabled(
+            analysisOptionName='config_am_timeseriesstatsmonthly_enable',
+            raiseException=True)
+
+        # get a list of timeSeriesStats output files from the streams file,
+        # reading only those that are between the start and end dates
+        startDate = self.config.get('climatology', 'startDate')
+        endDate = self.config.get('climatology', 'endDate')
+        streamName = 'timeSeriesStatsMonthlyOutput'
+        self.inputFiles = self.historyStreams.readpath(
+            streamName, startDate=startDate, endDate=endDate,
+            calendar=self.calendar)
+
+        if len(self.inputFiles) == 0:
+            raise IOError('No files were found in stream {} between {} and '
+                          '{}.'.format(streamName, startDate, endDate))
+
+        self._update_climatology_bounds_from_file_names()
+
+        # }}}
+
+    def run_task(self): # {{{
+        '''
+        Compute the requested climatologies
+
+        Authors
+        -------
+        Xylar Asay-Davis
+        '''
+
+        if len(self.variableList) == 0:
+            # nothing to do
+            return
+
+        self.logger.info('\nComputing MPAS climatologies from files:\n'
+                         ' {} through\n {}'.format(
+                             os.path.basename(self.inputFiles[0]),
+                             os.path.basename(self.inputFiles[-1])))
+
+        if self.seasons[0] == 'none':
+            seasonsToCheck = ['{:02d}'.format(month) for month in range(1, 13)]
+        else:
+            seasonsToCheck = self.seasons
+
+        allExist = True
+        for season in seasonsToCheck:
+
+            climatologyFileName, climatologyDirectory = \
+                self.get_file_name(season, returnDir=True)
+
+            if not os.path.exists(climatologyFileName):
+                allExist = False
+                break
+
+        if allExist:
+            # make sure all the necessary variables are also present
+            ds = xarray.open_dataset(self.get_file_name(seasonsToCheck[0],
+                                                        returnDir=False))
+
+            for variableName in self.variableList:
+                if variableName not in ds.variables:
+                    allExist = False
+                    break
+
+        if not allExist:
+            self._compute_climatologies_with_ncclimo(
+                inDirectory=self.historyDirectory,
+                outDirectory=climatologyDirectory)
+
+        # }}}
+
+    def get_file_name(self, season, returnDir=False): # {{{
+        """
+        Given the season, returns the full path of the MPAS climatology file
+        for that season (on the native MPAS mesh, before any masking or
+        remapping has been applied).
+
+        Parameters
+        ----------
+        season : str
+            One of the seasons in ``constants.monthDictionary``
+
+        returnDir : bool, optional
+            Return the directory as well
+
+        Returns
+        -------
+        fileName : str
+            The path to the climatology file for the specified season.
+ + Authors + ------- + Xylar Asay-Davis + """ + + fileName = self._outputFiles[season] + + if returnDir: + directory = self._outputDirs[season] + return fileName, directory + else: + return fileName # }}} + + def _update_climatology_bounds_from_file_names(self): # {{{ + """ + Update the start and end years and dates for climatologies based on the + years actually available in the list of files. + + Authors + ------- + Xylar Asay-Davis + """ + + config = self.config + + requestedStartYear = config.getint('climatology', 'startYear') + requestedEndYear = config.getint('climatology', 'endYear') + + dates = sorted([fileName[-13:-6] for fileName in self.inputFiles]) + years = [int(date[0:4]) for date in dates] + months = [int(date[5:7]) for date in dates] + + # search for the start of the first full year + firstIndex = 0 + while(firstIndex < len(years) and months[firstIndex] != 1): + firstIndex += 1 + startYear = years[firstIndex] + + # search for the end of the last full year + lastIndex = len(years)-1 + while(lastIndex >= 0 and months[lastIndex] != 12): + lastIndex -= 1 + endYear = years[lastIndex] + + if startYear != requestedStartYear or endYear != requestedEndYear: + message = "climatology start and/or end year different from " \ + "requested\n" \ + "requestd: {:04d}-{:04d}\n" \ + "actual: {:04d}-{:04d}\n".format(requestedStartYear, + requestedEndYear, + startYear, + endYear) + warnings.warn(message) + config.set('climatology', 'startYear', str(startYear)) + config.set('climatology', 'endYear', str(endYear)) + + startDate = '{:04d}-01-01_00:00:00'.format(startYear) + config.set('climatology', 'startDate', startDate) + endDate = '{:04d}-12-31_23:59:59'.format(endYear) + config.set('climatology', 'endDate', endDate) + + else: + startDate = config.get('climatology', 'startDate') + endDate = config.get('climatology', 'endDate') + + self.startDate = startDate + self.endDate = endDate + self.startYear = startYear + self.endYear = endYear + + # }}} + + def _setup_file_names(self): # {{{ + """ + Create a dictionary of file names and directories for this climatology + + Authors + ------- + Xylar Asay-Davis + """ + + config = self.config + climatologyBaseDirectory = build_config_full_path( + config, 'output', 'mpasClimatologySubdirectory') + + mpasMeshName = config.get('input', 'mpasMeshName') + + self._outputDirs = {} + self._outputFiles = {} + + directory = '{}/unmasked_{}'.format(climatologyBaseDirectory, + mpasMeshName) + + make_directories(directory) + for season in self.seasons: + monthValues = sorted(constants.monthDictionary[season]) + startMonth = monthValues[0] + endMonth = monthValues[-1] + + suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format( + self.startYear, startMonth, self.endYear, endMonth) + + if season in constants.abrevMonthNames: + season = '{:02d}'.format(monthValues[0]) + fileName = '{}/{}_{}_{}.nc'.format(directory, self.ncclimoModel, + season, suffix) + + self._outputDirs[season] = directory + self._outputFiles[season] = fileName + + # }}} + + def _compute_climatologies_with_ncclimo(self, inDirectory, outDirectory, + remapper=None, + remappedDirectory=None): # {{{ + ''' + Uses ncclimo to compute monthly, seasonal and/or annual climatologies. 
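With the test configuration (a climatology over year 2 on the ``oQU240`` mesh) and the task constructed in the earlier sketch, the naming scheme above works out as asserted in the new unit test, for example:

    fileName = mpasClimatologyTask.get_file_name(season='JFM')
    # e.g. <baseDirectory>/clim/mpas/unmasked_oQU240/mpaso_JFM_000201_000203_climo.nc
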
+
+        Parameters
+        ----------
+        inDirectory : str
+            The run directory containing timeSeriesStatsMonthly output
+
+        outDirectory : str
+            The output directory where climatologies will be written
+
+        remapper : ``shared.interpolation.Remapper`` object, optional
+            If present, a remapper that defines the source and destination
+            grids for remapping the climatologies.
+
+        remappedDirectory : str, optional
+            If present, the path where remapped climatologies should be
+            written. By default, remapped files are stored in the same
+            directory as the climatologies on the source grid. Has no effect
+            if ``remapper`` is ``None``.
+
+        Raises
+        ------
+        OSError
+            If ``ncclimo`` is not in the system path.
+
+        Author
+        ------
+        Xylar Asay-Davis
+        '''
+
+        if find_executable('ncclimo') is None:
+            raise OSError('ncclimo not found. Make sure the latest nco '
+                          'package is installed: \n'
+                          'conda install nco\n'
+                          'Note: this presumes use of the conda-forge '
+                          'channel.')
+
+        parallelMode = self.config.get('execute', 'ncclimoParallelMode')
+
+        args = ['ncclimo',
+                '--clm_md=mth',
+                '-a', 'sdd',
+                '-m', self.ncclimoModel,
+                '-p', parallelMode,
+                '-v', ','.join(self.variableList),
+                '--seasons={}'.format(','.join(self.seasons)),
+                '-s', '{:04d}'.format(self.startYear),
+                '-e', '{:04d}'.format(self.endYear),
+                '-i', inDirectory,
+                '-o', outDirectory]
+
+        if remapper is not None:
+            args.extend(['-r', remapper.mappingFileName])
+            if remappedDirectory is not None:
+                args.extend(['-O', remappedDirectory])
+
+        self.logger.info('running: {}'.format(' '.join(args)))
+        for handler in self.logger.handlers:
+            handler.flush()
+
+        # set an environment variable to make sure we're not using czender's
+        # local version of NCO instead of one we have intentionally loaded
+        env = os.environ.copy()
+        env['NCO_PATH_OVERRIDE'] = 'No'
+
+        process = subprocess.Popen(args, stdout=subprocess.PIPE,
+                                   stderr=subprocess.PIPE, env=env)
+        stdout, stderr = process.communicate()
+
+        if stdout:
+            self.logger.info(stdout)
+        if stderr:
+            for line in stderr.split('\n'):
+                self.logger.error(line)
+
+        if process.returncode != 0:
+            raise subprocess.CalledProcessError(process.returncode,
+                                                ' '.join(args))
+
+        # }}}
+    # }}}
+
+
+# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python
diff --git a/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py b/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py
new file mode 100644
index 000000000..7582691f8
--- /dev/null
+++ b/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py
@@ -0,0 +1,455 @@
+import xarray as xr
+import os
+
+from ..analysis_task import AnalysisTask
+
+from ..constants import constants
+
+from ..io.utility import build_config_full_path, make_directories
+from ..io import write_netcdf
+
+from .climatology import get_remapper
+from .comparison_descriptors import get_comparison_descriptor
+
+from ..grid import MpasMeshDescriptor
+
+from ..mpas_xarray import mpas_xarray
+
+
+class RemapMpasClimatologySubtask(AnalysisTask): # {{{
+    '''
+    A subtask for remapping climatologies produced by an
+    ``MpasClimatologyTask`` onto one or more comparison grids.
+
+    Attributes
+    ----------
+
+    climatologyName : str
+        A name that describes the climatology (e.g.
a short version of + the important field(s) in the climatology) used to name the + subdirectories for each stage of the climatology + + variableList : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the climatologies + + iselValues : dict + A dictionary of dimensions and indices (or ``None``) used to extract + a slice of the MPAS field. + + seasons : list of str + A list of seasons (keys in ``shared.constants.monthDictionary``) + over which the climatology should be computed or ['none'] if only + monthly climatologies are needed. + + comparisonGridNames : list of {'latlon', 'antarctic'} + The name(s) of the comparison grid to use for remapping. + + restartFileName : str + If ``comparisonGridName`` is not ``None``, the name of a restart + file from which the MPAS mesh can be read. + + Authors + ------- + Xylar Asay-Davis + ''' + + def __init__(self, mpasClimatologyTask, parentTask, climatologyName, + variableList, seasons, comparisonGridNames=['latlon'], + iselValues=None): + # {{{ + ''' + Construct the analysis task and adds it as a subtask of the + ``parentTask``. + + Parameters + ---------- + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped + + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + climatologyName : str + A name that describes the climatology (e.g. a short version of + the important field(s) in the climatology) used to name the + subdirectories for each stage of the climatology + + variableList : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the climatologies + + seasons : list of str, optional + A list of seasons (keys in ``shared.constants.monthDictionary``) + to be computed or ['none'] (not ``None``) if only monthly + climatologies are needed. + + comparisonGridNames : list of {'latlon', 'antarctic'}, optinal + The name(s) of the comparison grid to use for remapping. + + iselValues : dict, optional + A dictionary of dimensions and indices (or ``None``) used to + extract a slice of the MPAS field(s). + + Authors + ------- + Xylar Asay-Davis + ''' + tags = ['climatology'] + + # call the constructor from the base class (AnalysisTask) + super(RemapMpasClimatologySubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + subtaskName='remapMpasClimatology', + componentName=parentTask.componentName, + tags=tags) + + self.variableList = variableList + self.seasons = seasons + self.comparisonGridNames = comparisonGridNames + self.iselValues = iselValues + self.climatologyName = climatologyName + self.mpasClimatologyTask = mpasClimatologyTask + + self.run_after(mpasClimatologyTask) + + parentTask.add_subtask(self) + + # this is a stopgap until MPAS implements the _FillValue attribute + # correctly + self._fillValue = -9.99999979021476795361e+33 + + # }}} + + def setup_and_check(self): # {{{ + ''' + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + IOError : + If a restart file is not available from which to read mesh + information or if no history files are available from which to + compute the climatology in the desired time range. 
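A minimal sketch of constructing the subtask (following the new unit test; the parent task name and field here are only illustrative, and in practice the parent task would normally create the subtask in its own constructor):

    from mpas_analysis.shared import AnalysisTask
    from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask

    parentTask = AnalysisTask(config=config, taskName='climatologyMapSSH',
                              componentName='ocean', tags=['climatology'])
    # the subtask registers itself with parentTask.add_subtask() and arranges
    # to run after mpasClimatologyTask via run_after()
    remapSubtask = RemapMpasClimatologySubtask(
        mpasClimatologyTask, parentTask, climatologyName='ssh',
        variableList=['timeMonthly_avg_ssh'], seasons=['JFM'])
    remapSubtask.setup_and_check()
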
+ + Authors + ------- + Xylar Asay-Davis + ''' + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(RemapMpasClimatologySubtask, self).setup_and_check() + + try: + self.restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS restart file found: need at least one ' + 'restart file to perform remapping of ' + 'climatologies.') + + # we set up the remapper here because ESFM_RegridWeightGen seems to + # have trouble if it runs in another process (or in several at once) + self._setup_remappers() + + # don't add the variables and seasons to mpasClimatologyTask until + # we're sure this subtask is supposed to run + self.mpasClimatologyTask.add_variables(self.variableList, self.seasons) + + self._setup_file_names() + + # make the mapping directory, because doing so within each process + # seems to be giving ESMF_RegridWeightGen some trouble + mappingSubdirectory = build_config_full_path(self.config, 'output', + 'mappingSubdirectory') + make_directories(mappingSubdirectory) + + # }}} + + def run_task(self): # {{{ + ''' + Compute the requested climatologies + + Authors + ------- + Xylar Asay-Davis + ''' + + self.logger.info('\nRemapping climatology {}'.format( + self.climatologyName)) + + dsMask = xr.open_dataset(self.mpasClimatologyTask.inputFiles[0]) + dsMask = mpas_xarray.subset_variables(dsMask, self.variableList) + iselValues = {'Time': 0} + if self.iselValues is not None: + iselValues.update(self.iselValues) + # select only Time=0 and possibly only the desired vertical + # slice + dsMask = dsMask.isel(**iselValues) + + for season in self.seasons: + self._mask_climatologies(season, dsMask) + + for comparisonGridName in self.comparisonGridNames: + + for season in self.seasons: + + maskedClimatologyFileName = self.get_file_name( + season, 'masked', comparisonGridName) + + remappedFileName = self.get_file_name( + season, 'remapped', comparisonGridName) + + if not os.path.exists(remappedFileName): + self._remap(inFileName=maskedClimatologyFileName, + outFileName=remappedFileName, + remapper=self.remappers[comparisonGridName], + comparisonGridName=comparisonGridName) + # }}} + + def get_file_name(self, season, stage, comparisonGridName=None): # {{{ + """ + Given config options, the name of a field and a string identifying the + months in a seasonal climatology, returns the full path for MPAS + climatology files before and after remapping. + + Parameters + ---------- + season : str + One of the seasons in ``constants.monthDictionary`` + + stage : {'masked', 'remapped'} + The stage of the masking and remapping process + + comparisonGridName : {'latlon', 'antarctic'}, optional + The name of the comparison grid to use for remapping. + + Returns + ------- + fileName : str + The path to the climatology file for the specified season. + + Authors + ------- + Xylar Asay-Davis + """ + + if stage == 'remapped': + key = (season, stage, comparisonGridName) + else: + key = (season, stage) + fileName = self._outputFiles[key] + + return fileName # }}} + + def _setup_remappers(self): # {{{ + """ + Set up the remappers for remapping from the MPAS to the comparison + grids. 
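The masked and remapped stages produced by this subtask then follow the directory layout set up below; from the new unit test, for example:

    fileName = remapSubtask.get_file_name(season='JFM', stage='masked',
                                          comparisonGridName='latlon')
    # e.g. <baseDirectory>/clim/mpas/masked/ssh_oQU240/mpaso_JFM_000201_000203_climo.nc

    fileName = remapSubtask.get_file_name(season='JFM', stage='remapped',
                                          comparisonGridName='latlon')
    # e.g. <baseDirectory>/clim/mpas/remapped/ssh_oQU240_to_0.5x0.5degree/
    #      mpaso_JFM_000201_000203_climo.nc
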
+ + Authors + ------- + Xylar Asay-Davis + """ + config = self.config + + # make reamppers + mappingFilePrefix = 'map' + self.remappers = {} + for comparisonGridName in self.comparisonGridNames: + + comparisonDescriptor = get_comparison_descriptor( + config, comparisonGridName) + self.comparisonGridName = comparisonDescriptor.meshName + mpasDescriptor = MpasMeshDescriptor( + self.restartFileName, meshName=config.get('input', + 'mpasMeshName')) + self.mpasMeshName = mpasDescriptor.meshName + + self.remappers[comparisonGridName] = get_remapper( + config=config, sourceDescriptor=mpasDescriptor, + comparisonDescriptor=comparisonDescriptor, + mappingFilePrefix=mappingFilePrefix, + method=config.get('climatology', 'mpasInterpolationMethod'), + logger=self.logger) + + # }}} + + def _setup_file_names(self): # {{{ + """ + Create a dictionary of file names and directories for this climatology + + Authors + ------- + Xylar Asay-Davis + """ + + config = self.config + climatologyBaseDirectory = build_config_full_path( + config, 'output', 'mpasClimatologySubdirectory') + + mpasMeshName = config.get('input', 'mpasMeshName') + + comparisonFullMeshNames = {} + for comparisonGridName in self.comparisonGridNames: + comparisonDescriptor = get_comparison_descriptor( + config, comparisonGridName) + comparisonFullMeshNames[comparisonGridName] = \ + comparisonDescriptor.meshName + + keys = [] + for season in self.seasons: + stage = 'masked' + keys.append((season, stage)) + stage = 'remapped' + for comparisonGridName in self.comparisonGridNames: + keys.append((season, stage, comparisonGridName)) + + self._outputDirs = {} + self._outputFiles = {} + + for key in keys: + season = key[0] + stage = key[1] + if stage == 'remapped': + comparisonGridName = key[2] + + stageDirectory = '{}/{}'.format(climatologyBaseDirectory, stage) + + if stage == 'masked': + directory = '{}/{}_{}'.format( + stageDirectory, self.climatologyName, + mpasMeshName) + elif stage == 'remapped': + directory = '{}/{}_{}_to_{}'.format( + stageDirectory, + self.climatologyName, + mpasMeshName, + comparisonFullMeshNames[comparisonGridName]) + + make_directories(directory) + + monthValues = sorted(constants.monthDictionary[season]) + startMonth = monthValues[0] + endMonth = monthValues[-1] + + suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format( + self.mpasClimatologyTask.startYear, startMonth, + self.mpasClimatologyTask.endYear, endMonth) + + if season in constants.abrevMonthNames: + season = '{:02d}'.format(monthValues[0]) + fileName = '{}/{}_{}_{}.nc'.format( + directory, self.mpasClimatologyTask.ncclimoModel, season, + suffix) + + self._outputDirs[key] = directory + self._outputFiles[key] = fileName + # }}} + + def _mask_climatologies(self, season, dsMask): # {{{ + ''' + For each season, creates a masked version of the climatology + + Parameters + ---------- + season : str + The name of the season to be masked + + dsMask : ``xarray.Dataset`` object + A data set (from the first input file) that can be used to + determine the mask in MPAS output files. 
+ + Author + ------ + Xylar Asay-Davis + ''' + + climatologyFileName = self.mpasClimatologyTask.get_file_name(season) + + maskedClimatologyFileName = self.get_file_name(season, 'masked') + + if not os.path.exists(maskedClimatologyFileName): + # slice and mask the data set + climatology = xr.open_dataset(climatologyFileName) + climatology = mpas_xarray.subset_variables(climatology, + self.variableList) + iselValues = {'Time': 0} + if self.iselValues is not None: + iselValues.update(self.iselValues) + # select only Time=0 and possibly only the desired vertical + # slice + climatology = climatology.isel(**iselValues) + + # mask the data set + for variableName in self.variableList: + climatology[variableName] = \ + climatology[variableName].where( + dsMask[variableName] != self._fillValue) + + write_netcdf(climatology, maskedClimatologyFileName) + # }}} + + def _remap(self, inFileName, outFileName, remapper, comparisonGridName): + # {{{ + """ + Performs remapping either using ``ncremap`` or the native python code, + depending on the requested setting and the comparison grid + + Parameters + ---------- + inFileName : str + The name of the input file to be remapped. + + outFileName : str + The name of the output file to which the remapped data set should + be written. + + remapper : ``Remapper`` object + A remapper that can be used to remap files or data sets to a + comparison grid. + + comparisonGridNames : {'latlon', 'antarctic'} + The name of the comparison grid to use for remapping. + + Authors + ------- + Xylar Asay-Davis + """ + if remapper.mappingFileName is None: + # no remapping is needed + return + + useNcremap = self.config.getboolean('climatology', 'useNcremap') + + if comparisonGridName == 'antarctic': + # ncremap doesn't support polar stereographic grids + useNcremap = False + + renormalizationThreshold = self.config.getfloat( + 'climatology', 'renormalizationThreshold') + + if useNcremap: + remapper.remap_file(inFileName=inFileName, + outFileName=outFileName, + overwrite=True, + renormalize=renormalizationThreshold, + logger=self.logger) + else: + + climatologyDataSet = xr.open_dataset(inFileName) + + remappedClimatology = remapper.remap(climatologyDataSet, + renormalizationThreshold) + write_netcdf(remappedClimatology, outFileName) + # }}} + + # }}} + + +# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/shared/html/pages.py b/mpas_analysis/shared/html/pages.py index ed7bd13a3..c1ab21f3a 100644 --- a/mpas_analysis/shared/html/pages.py +++ b/mpas_analysis/shared/html/pages.py @@ -16,7 +16,7 @@ def generate_html(config, analyses): # {{{ config : ``MpasAnalysisConfigParser`` object contains config options - analysis : list of ``AnalysisTask`` objects + analysis : ``OrderedDict`` of ``AnalysisTask`` objects the analysis tasks that generated the plots to include in the webpages. 
The ``list_xml_files()`` method will be called on each task to get the list of files to include on the webpage for the associated @@ -38,7 +38,7 @@ def generate_html(config, analyses): # {{{ # add images from each analysis task, creating ga dictionary of components missingCount = 0 - for analysisTask in analyses: + for analysisTask in analyses.itervalues(): for fileName in analysisTask.xmlFileNames: try: ComponentPage.add_image(fileName, config, components) diff --git a/mpas_analysis/test/test_climatology.py b/mpas_analysis/test/test_climatology.py index f53af9d8f..5238ca62f 100644 --- a/mpas_analysis/test/test_climatology.py +++ b/mpas_analysis/test/test_climatology.py @@ -18,11 +18,10 @@ from mpas_analysis.configuration.MpasAnalysisConfigParser \ import MpasAnalysisConfigParser from mpas_analysis.shared.climatology import \ - get_lat_lon_comparison_descriptor, get_remapper, \ - get_mpas_climatology_dir_name, get_observation_climatology_file_names, \ + get_comparison_descriptor, get_remapper, \ + get_observation_climatology_file_names, \ add_years_months_days_in_month, compute_climatology, \ - compute_monthly_climatology, update_climatology_bounds_from_file_names, \ - cache_climatologies + compute_monthly_climatology from mpas_analysis.shared.grid import MpasMeshDescriptor, LatLonGridDescriptor from mpas_analysis.shared.constants import constants @@ -74,7 +73,7 @@ def setup_mpas_remapper(self, config): mpasMeshFileName = '{}/mpasMesh.nc'.format(self.datadir) comparisonDescriptor = \ - get_lat_lon_comparison_descriptor(config) + get_comparison_descriptor(config, comparisonGridName='latlon') mpasDescriptor = MpasMeshDescriptor( mpasMeshFileName, meshName=config.get('input', 'mpasMeshName')) @@ -91,7 +90,7 @@ def setup_obs_remapper(self, config, fieldName): gridFileName = '{}/obsGrid.nc'.format(self.datadir) comparisonDescriptor = \ - get_lat_lon_comparison_descriptor(config) + get_comparison_descriptor(config, comparisonGridName='latlon') obsDescriptor = LatLonGridDescriptor.read(fileName=gridFileName, latVarName='lat', @@ -193,26 +192,6 @@ def test_get_observations_remapper(self): shutil.copyfile(defaultMappingFileName, explicitMappingFileName) - - def test_get_mpas_climatology_dir_name(self): - config = self.setup_config() - fieldName = 'sst' - - remapper = self.setup_mpas_remapper(config) - - (climatologyDirectory, remappedDirectory) = \ - get_mpas_climatology_dir_name( - config, fieldName, - remapper.sourceDescriptor.meshName, - remapper.destinationDescriptor.meshName) - expectedClimatologyDirectory = \ - '{}/clim/mpas/sst_QU240'.format(self.test_dir) - self.assertEqual(climatologyDirectory, expectedClimatologyDirectory) - - expectedRemappedDirectory = '{}/clim/mpas/remap/sst_QU240_to_' \ - '0.5x0.5degree'.format(self.test_dir) - self.assertEqual(remappedDirectory, expectedRemappedDirectory) - def test_get_observation_climatology_file_names(self): config = self.setup_config() fieldName = 'sst' @@ -293,207 +272,5 @@ def test_compute_monthly_climatology(self): self.assertArrayApproxEqual(monthlyClimatology.month.values, refClimatology.month.values) - def test_update_climatology_bounds_from_file_names(self): - config = self.setup_config() - - startYear = 8 - endYear = 10 - config.set('climatology', 'startYear', str(startYear)) - config.set('climatology', 'endYear', str(endYear)) - startDate = '{:04d}-01-01_00:00:00'.format(startYear) - config.set('climatology', 'startDate', startDate) - endDate = '{:04d}-12-31_23:59:59'.format(endYear) - config.set('climatology', 'endDate', endDate) - - # 
first a list of files that is consistent with the requested years - inputFiles = [] - for year in range(startYear, endYear+1): - for month in range(1, 13): - inputFiles.append('someInput-{:04d}-{:02d}-01.nc'.format( - year, month)) - - changed, outStartYear, outEndYear, outStartDate, outEndDate = \ - update_climatology_bounds_from_file_names(inputFiles, config) - - assert(not changed) - assert(outStartYear == startYear) - assert(outEndYear == endYear) - assert(outStartDate == startDate) - assert(outEndDate == endDate) - - # next, a case where the output data is only there up to year 8 - inputFiles = [] - for year in range(startYear, startYear+1): - for month in range(1, 13): - inputFiles.append('someInput-{:04d}-{:02d}-01.nc'.format( - year, month)) - - changed, outStartYear, outEndYear, outStartDate, outEndDate = \ - update_climatology_bounds_from_file_names(inputFiles, config) - - assert(changed) - assert(outStartYear == startYear) - assert(outEndYear == startYear) - assert(outStartDate == startDate) - assert(outEndDate == '{:04d}-12-31_23:59:59'.format(startYear)) - - def cache_climatologies_setup(self): - config = self.setup_config() - calendar = 'gregorian_noleap' - ds = self.open_test_ds(config, calendar) - fieldName = 'mld' - climFileName = '{}/refSeasonalClim.nc'.format(self.datadir) - refClimatology = xarray.open_dataset(climFileName) - - remapper = self.setup_mpas_remapper(config) - - return {'config': config, 'calendar': calendar, 'ds': ds, - 'fieldName': fieldName, 'climFileName': climFileName, - 'refClimatology': refClimatology, 'remapper': remapper} - - def test_jan_1yr_climo_test1(self): - setup = self.cache_climatologies_setup() - # test1: Just January, 1-year climatologies are cached; only one file - # is produced with suffix year0002; a second run of - # cache_climatologies doesn't modify any files - test1 = {'monthNames': 'Jan', - 'monthValues': [1], - 'yearsPerCacheFile': 1, - 'expectedSuffixes': ['year0002'], - 'expectedModified': [False], - # weird value because first time step of Jan. missing in ds - 'expectedDays': 30.958333, - 'expectedMonths': 1, - 'refClimatology': None} - self.cache_climatologies_driver(test1, **setup) - - def test_jfm_1yr_climo_test2(self): - setup = self.cache_climatologies_setup() - # same as test1 but with JFM - test2 = {'monthNames': 'JFM', - 'monthValues': constants.monthDictionary['JFM'], - 'yearsPerCacheFile': 1, - 'expectedSuffixes': ['year0002'], - 'expectedModified': [False], - # weird value because first time step of Jan. missing in ds - 'expectedDays': 89.958333, - 'expectedMonths': 3, - 'refClimatology': setup['refClimatology']} - self.cache_climatologies_driver(test2, **setup) - - def test_jan_2yr_climo_test3(self): - setup = self.cache_climatologies_setup() - # test3: 2-year climatologies are cached; 2 files are produced - # with suffix years0002-0003 (the "individual" climatology - # file) and year0002 (the "aggregated" climatology file); - # a second tries to update the "individual" cache file - # because it appears to be incomplete but does not attempt - # to update the aggregated climatology file because no - # additional years were processed and the file was already - # complete for the span of years present - test3 = {'monthNames': 'Jan', - 'monthValues': [1], - 'yearsPerCacheFile': 2, - 'expectedSuffixes': ['years0002-0003', 'year0002'], - 'expectedModified': [True, False], - # weird value because first time step of Jan. 
missing in ds - 'expectedDays': 30.958333, - 'expectedMonths': 1, - 'refClimatology': None} - self.cache_climatologies_driver(test3, **setup) - - def test_jfm_2yr_climo_test4(self): - setup = self.cache_climatologies_setup() - # test4: same as test3 but with JFM - test4 = {'monthNames': 'JFM', - 'monthValues': constants.monthDictionary['JFM'], - 'yearsPerCacheFile': 2, - 'expectedSuffixes': ['years0002-0003', 'year0002'], - 'expectedModified': [True, False], - # weird value because first time step of Jan. missing in ds - 'expectedDays': 89.958333, - 'expectedMonths': 3, - 'refClimatology': setup['refClimatology']} - self.cache_climatologies_driver(test4, **setup) - - def cache_climatologies_driver(self, test, config, fieldName, - ds, remapper, calendar, **kwargs): - monthNames = test['monthNames'] - monthValues = test['monthValues'] - yearsPerCacheFile = test['yearsPerCacheFile'] - expectedSuffixes = test['expectedSuffixes'] - expectedModified = test['expectedModified'] - expectedDays = test['expectedDays'] - expectedMonths = test['expectedMonths'] - refClimatology = test['refClimatology'] - - climatologyDirectory = \ - get_mpas_climatology_dir_name( - config, fieldName, - remapper.sourceDescriptor.meshName) - - climatologyPrefix = '{}/mpaso_{}_climo'.format( - climatologyDirectory, monthNames) - - config.set('climatology', 'yearsPerCacheFile', - str(yearsPerCacheFile)) - # once without cache files - dsClimatology = cache_climatologies( - ds, monthValues, config, climatologyPrefix, calendar, - printProgress=True) - - if refClimatology is not None: - self.assertArrayApproxEqual(dsClimatology.mld.values, - refClimatology.mld.values) - - self.assertEqual(dsClimatology.attrs['totalMonths'], - expectedMonths) - self.assertApproxEqual(dsClimatology.attrs['totalDays'], - expectedDays) - dsClimatology.close() - - fingerprints = [] - for suffix in expectedSuffixes: - expectedClimatologyFileName = '{}/clim/mpas/mld_QU240/mpaso_' \ - '{}_climo_{}.nc'.format( - self.test_dir, monthNames, - suffix) - assert os.path.exists(expectedClimatologyFileName) - - dsClimatology = xarray.open_dataset(expectedClimatologyFileName) - fingerprints.append(dsClimatology.fingerprintClimo) - - # try it again with cache files saved - dsClimatology = cache_climatologies( - ds, monthValues, config, climatologyPrefix, calendar, - printProgress=True) - - if refClimatology is not None: - self.assertArrayApproxEqual(dsClimatology.mld.values, - refClimatology.mld.values) - - self.assertEqual(dsClimatology.attrs['totalMonths'], - expectedMonths) - self.assertApproxEqual(dsClimatology.attrs['totalDays'], - expectedDays) - dsClimatology.close() - - for index, suffix in enumerate(expectedSuffixes): - expectedClimatologyFileName = '{}/clim/mpas/mld_QU240/mpaso_' \ - '{}_climo_{}.nc'.format( - self.test_dir, monthNames, - suffix) - - dsClimatology = xarray.open_dataset(expectedClimatologyFileName) - fingerprintCheck = dsClimatology.fingerprintClimo - - # Check whether the given file was modified, and whether - # this was the expected result - fileWasModified = fingerprints[index] != fingerprintCheck - assert fileWasModified == expectedModified[index] - - # remove the cache file for the next try - os.remove(expectedClimatologyFileName) - # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/test/test_mpas_climatology_task.py b/mpas_analysis/test/test_mpas_climatology_task.py new file mode 100644 index 000000000..8fb1db36b --- /dev/null +++ b/mpas_analysis/test/test_mpas_climatology_task.py @@ -0,0 +1,189 
@@ +""" +Unit test infrastructure for MpasClimatologyTask. + +Xylar Asay-Davis +""" + +import pytest +import tempfile +import shutil +import os + +from mpas_analysis.test import TestCase, loaddatadir +from mpas_analysis.configuration import MpasAnalysisConfigParser +from mpas_analysis.shared.climatology import MpasClimatologyTask, \ + RemapMpasClimatologySubtask +from mpas_analysis.shared import AnalysisTask +from mpas_analysis.shared.io.utility import build_config_full_path, \ + make_directories + + +@pytest.mark.usefixtures("loaddatadir") +class TestMpasClimatologyTask(TestCase): + def setUp(self): + # Create a temporary directory + self.test_dir = tempfile.mkdtemp() + + def tearDown(self): + # Remove the directory after the test + shutil.rmtree(self.test_dir) + + def setup_config(self): + configPath = self.datadir.join('config.QU240') + config = MpasAnalysisConfigParser() + config.read(str(configPath)) + config.set('input', 'baseDirectory', str(self.datadir)) + config.set('output', 'baseDirectory', str(self.test_dir)) + return config + + def setup_task(self): + config = self.setup_config() + mpasClimatologyTask = MpasClimatologyTask(config=config, + componentName='ocean') + + mpasClimatologyTask.setup_and_check() + return mpasClimatologyTask + + def setup_subtask(self, mpasClimatologyTask): + parentTask = AnalysisTask( + config=mpasClimatologyTask.config, taskName='fake', + componentName=mpasClimatologyTask.componentName, + tags=['climatology']) + climatologyName = 'ssh' + variableList = ['timeMonthly_avg_ssh'] + seasons = [mpasClimatologyTask.seasons[0]] + + remapSubtask = RemapMpasClimatologySubtask(mpasClimatologyTask, + parentTask, + climatologyName, + variableList, seasons) + + remapSubtask.setup_and_check() + return remapSubtask + + def add_variables(self, mpasClimatologyTask): + variableList = ['timeMonthly_avg_ssh', 'timeMonthly_avg_tThreshMLD'] + seasons = ['JFM', 'JJA', 'ANN'] + mpasClimatologyTask.add_variables(variableList=variableList, + seasons=seasons) + + return variableList, seasons + + def test_add_variables(self): + mpasClimatologyTask = self.setup_task() + variableList, seasons = self.add_variables(mpasClimatologyTask) + + assert(variableList == mpasClimatologyTask.variableList) + assert(seasons == mpasClimatologyTask.seasons) + + # add a variable and season already in the list + mpasClimatologyTask.add_variables(variableList=[variableList[0]], + seasons=[seasons[-1]]) + + # make sure the lists still match (extra redundant varible and season + # weren't added) + assert(variableList == mpasClimatologyTask.variableList) + assert(seasons == mpasClimatologyTask.seasons) + + def test_get_file_name(self): + mpasClimatologyTask = self.setup_task() + variableList, seasons = self.add_variables(mpasClimatologyTask) + + fileName = mpasClimatologyTask.get_file_name(season='JFM') + assert(fileName == '{}/clim/mpas/unmasked_oQU240/' + 'mpaso_JFM_000201_000203_climo.nc'.format(str(self.test_dir))) + + def test_run_analysis(self): + mpasClimatologyTask = self.setup_task() + self.add_variables(mpasClimatologyTask) + + config = mpasClimatologyTask.config + logsDirectory = build_config_full_path(config, 'output', + 'logsSubdirectory') + make_directories(logsDirectory) + make_directories('{}/configs/'.format(logsDirectory)) + + mpasClimatologyTask.run(writeLogFile=False) + + for season in mpasClimatologyTask.seasons: + fileName = mpasClimatologyTask.get_file_name(season=season) + assert(os.path.exists(fileName)) + + def test_update_climatology_bounds_from_file_names(self): + 
mpasClimatologyTask = self.setup_task() + config = mpasClimatologyTask.config + + # first make sure the start and end years stay unchanged when we use + # the start and end years already in the config file + startYear = 2 + endYear = 2 + startDate = '{:04d}-01-01_00:00:00'.format(startYear) + endDate = '{:04d}-12-31_23:59:59'.format(endYear) + + mpasClimatologyTask._update_climatology_bounds_from_file_names() + + assert(mpasClimatologyTask.startYear == startYear) + assert(mpasClimatologyTask.endYear == endYear) + assert(mpasClimatologyTask.startDate == startDate) + assert(mpasClimatologyTask.endDate == endDate) + + # Now, set the the start and end years out of range and make sure they + # get changed back to the values that are in range + startYear = 1 + endYear = 5 + startDate = '{:04d}-01-01_00:00:00'.format(startYear) + endDate = '{:04d}-12-31_23:59:59'.format(endYear) + + config.set('climatology', 'startYear', str(startYear)) + config.set('climatology', 'endYear', str(endYear)) + config.set('climatology', 'startDate', startDate) + config.set('climatology', 'endDate', endDate) + + with pytest.warns(UserWarning): + mpasClimatologyTask._update_climatology_bounds_from_file_names() + + startYear = 2 + endYear = 2 + startDate = '{:04d}-01-01_00:00:00'.format(startYear) + endDate = '{:04d}-12-31_23:59:59'.format(endYear) + + assert(mpasClimatologyTask.startYear == startYear) + assert(mpasClimatologyTask.endYear == endYear) + assert(mpasClimatologyTask.startDate == startDate) + assert(mpasClimatologyTask.endDate == endDate) + + def test_subtask_run_analysis(self): + mpasClimatologyTask = self.setup_task() + self.add_variables(mpasClimatologyTask) + remapSubtask = self.setup_subtask(mpasClimatologyTask) + + config = mpasClimatologyTask.config + logsDirectory = build_config_full_path(config, 'output', + 'logsSubdirectory') + make_directories(logsDirectory) + make_directories('{}/configs/'.format(logsDirectory)) + + mpasClimatologyTask.run(writeLogFile=False) + remapSubtask.run(writeLogFile=False) + + for season in remapSubtask.seasons: + for stage in ['masked', 'remapped']: + fileName = remapSubtask.get_file_name( + season=season, stage=stage, + comparisonGridName='latlon') + assert(os.path.exists(fileName)) + + def test_subtask_get_file_name(self): + mpasClimatologyTask = self.setup_task() + variableList, seasons = self.add_variables(mpasClimatologyTask) + remapSubtask = self.setup_subtask(mpasClimatologyTask) + + fileName = remapSubtask.get_file_name(season='JFM', stage='masked', + comparisonGridName='latlon') + assert(fileName == '{}/clim/mpas/masked/ssh_oQU240/' + 'mpaso_JFM_000201_000203_climo.nc'.format(str(self.test_dir))) + + fileName = remapSubtask.get_file_name(season='JFM', stage='remapped', + comparisonGridName='latlon') + assert(fileName == '{}/clim/mpas/remapped/ssh_oQU240_to_0.5x0.5degree/' + 'mpaso_JFM_000201_000203_climo.nc'.format(str(self.test_dir))) diff --git a/mpas_analysis/test/test_mpas_climatology_task/config.QU240 b/mpas_analysis/test/test_mpas_climatology_task/config.QU240 new file mode 100644 index 000000000..41b8db0c7 --- /dev/null +++ b/mpas_analysis/test/test_mpas_climatology_task/config.QU240 @@ -0,0 +1,30 @@ +[runs] +mainRunName = runName + +[execute] +parallelTaskCount = 1 +ncclimoParallelMode = serial + +[input] +baseDirectory = /dir/for/model/output +runSubdirectory = . +oceanHistorySubdirectory = . 
+oceanNamelistFileName = mpas-o_in +oceanStreamsFileName = streams.ocean +mpasMeshName = oQU240 + +[output] +baseDirectory = /dir/for/analysis/output +plotsSubdirectory = plots +logsSubdirectory = logs +mpasClimatologySubdirectory = clim/mpas +mappingSubdirectory = mapping + +[climatology] +startYear = 2 +endYear = 2 +comparisonLatResolution = 0.5 +comparisonLonResolution = 0.5 +mpasInterpolationMethod = bilinear +useNcremap = True +renormalizationThreshold = 0.01 diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpas-o_in b/mpas_analysis/test/test_mpas_climatology_task/mpas-o_in new file mode 100644 index 000000000..0d8ba89a3 --- /dev/null +++ b/mpas_analysis/test/test_mpas_climatology_task/mpas-o_in @@ -0,0 +1,1092 @@ +&run_modes + config_ocean_run_mode = 'forward' +/ +&time_management + config_calendar_type = 'gregorian_noleap' + config_do_restart = .true. + config_restart_timestamp_name = 'rpointer.ocn' + config_start_time = 'file' +/ +&io + config_pio_num_iotasks = 0 + config_pio_stride = 1 + config_write_output_on_startup = .true. +/ +&decomposition + config_block_decomp_file_prefix = '/project/projectdirs/acme/inputdata/ocn/mpas-o/oQU240/mpas-o.graph.info.151209.part.' + config_explicit_proc_decomp = .false. + config_num_halos = 3 + config_number_of_blocks = 0 + config_proc_decomp_file_prefix = 'graph.info.part.' +/ +&init_setup + config_expand_sphere = .false. + config_init_configuration = 'none' + config_realistic_coriolis_parameter = .false. + config_vert_levels = -1 + config_vertical_grid = 'uniform' + config_write_cull_cell_mask = .true. +/ +&cvtgenerator + config_1dcvtgenerator_dzseed = 1.2 + config_1dcvtgenerator_stretch1 = 1.0770 + config_1dcvtgenerator_stretch2 = 1.0275 +/ +&init_ssh_and_landicepressure + config_iterative_init_variable = 'landIcePressure' +/ +&time_integration + config_dt = '01:00:00' + config_time_integrator = 'split_explicit' +/ +&ale_vertical_grid + config_dzdk_positive = .false. + config_max_thickness_factor = 6.0 + config_min_thickness = 1.0 + config_use_min_max_thickness = .false. + config_vert_coord_movement = 'uniform_stretching' +/ +&ale_frequency_filtered_thickness + config_highfreqthick_del2 = 100.0 + config_highfreqthick_restore_time = 30.0 + config_thickness_filter_timescale = 5.0 + config_use_freq_filtered_thickness = .false. + config_use_highfreqthick_del2 = .false. + config_use_highfreqthick_restore = .false. +/ +&partial_bottom_cells + config_alter_ics_for_pbcs = .false. + config_min_pbc_fraction = 0.10 + config_pbc_alteration_type = 'full_cell' +/ +&hmix + config_apvm_scale_factor = 0.0 + config_hmix_scalewithmesh = .false. + config_maxmeshdensity = -1.0 +/ +&hmix_del2 + config_mom_del2 = 10.0 + config_tracer_del2 = 10.0 + config_use_mom_del2 = .false. + config_use_tracer_del2 = .false. +/ +&hmix_del4 + config_mom_del4 = 2.0e14 + config_mom_del4_div_factor = 1.0 + config_tracer_del4 = 0.0 + config_use_mom_del4 = .true. + config_use_tracer_del4 = .false. +/ +&hmix_leith + config_leith_dx = 15000.0 + config_leith_parameter = 1.0 + config_leith_visc2_max = 2.5e3 + config_use_leith_del2 = .false. +/ +&mesoscale_eddy_parameterization + config_gravwavespeed_trunc = 0.3 + config_max_relative_slope = 0.01 + config_redi_bottom_layer_tapering_depth = 0.0 + config_redi_kappa = 0.0 + config_redi_surface_layer_tapering_extent = 0.0 + config_standardgm_tracer_kappa = 600.0 + config_use_redi_bottom_layer_tapering = .false. + config_use_redi_surface_layer_tapering = .false. + config_use_standardgm = .true. 
+/ +&hmix_del2_tensor + config_mom_del2_tensor = 10.0 + config_use_mom_del2_tensor = .false. +/ +&hmix_del4_tensor + config_mom_del4_tensor = 5.0e13 + config_use_mom_del4_tensor = .false. +/ +&rayleigh_damping + config_rayleigh_damping_coeff = 0.0 + config_rayleigh_friction = .false. +/ +&vmix + config_convective_diff = 1.0 + config_convective_visc = 1.0 +/ +&vmix_const + config_use_const_diff = .false. + config_use_const_visc = .false. + config_vert_diff = 1.0e-5 + config_vert_visc = 1.0e-4 +/ +&vmix_rich + config_bkrd_vert_diff = 1.0e-5 + config_bkrd_vert_visc = 1.0e-4 + config_rich_mix = 0.005 + config_use_rich_diff = .false. + config_use_rich_visc = .false. +/ +&vmix_tanh + config_max_diff_tanh = 2.5e-2 + config_max_visc_tanh = 2.5e-1 + config_min_diff_tanh = 1.0e-5 + config_min_visc_tanh = 1.0e-4 + config_use_tanh_diff = .false. + config_use_tanh_visc = .false. + config_zmid_tanh = -100 + config_zwidth_tanh = 100 +/ +&cvmix + config_cvmix_background_diffusion = 1.0e-5 + config_cvmix_background_viscosity = 1.0e-4 + config_cvmix_convective_basedonbvf = .true. + config_cvmix_convective_diffusion = 1.0 + config_cvmix_convective_triggerbvf = 0.0 + config_cvmix_convective_viscosity = 1.0 + config_cvmix_kpp_boundary_layer_depth = 30.0 + config_cvmix_kpp_criticalbulkrichardsonnumber = 0.25 + config_cvmix_kpp_ekmanobl = .false. + config_cvmix_kpp_interpolationomltype = 'quadratic' + config_cvmix_kpp_matching = 'SimpleShapes' + config_cvmix_kpp_monobobl = .false. + config_cvmix_kpp_stop_obl_search = 100.0 + config_cvmix_kpp_surface_layer_averaging = 5.0 + config_cvmix_kpp_surface_layer_extent = 0.1 + config_cvmix_kpp_use_enhanced_diff = .true. + config_cvmix_num_ri_smooth_loops = 2 + config_cvmix_prandtl_number = 1.0 + config_cvmix_shear_kpp_exp = 3 + config_cvmix_shear_kpp_nu_zero = 0.005 + config_cvmix_shear_kpp_ri_zero = 0.7 + config_cvmix_shear_mixing_scheme = 'KPP' + config_cvmix_shear_pp_alpha = 5.0 + config_cvmix_shear_pp_exp = 2.0 + config_cvmix_shear_pp_nu_zero = 0.005 + config_use_cvmix = .true. + config_use_cvmix_background = .true. + config_use_cvmix_convection = .true. + config_use_cvmix_double_diffusion = .false. + config_use_cvmix_fixed_boundary_layer = .false. + config_use_cvmix_kpp = .true. + config_use_cvmix_shear = .true. + config_use_cvmix_tidal_mixing = .false. + configure_cvmix_kpp_minimum_obl_under_sea_ice = 10.0 +/ +&forcing + config_flux_attenuation_coefficient = 0.001 + config_flux_attenuation_coefficient_runoff = 10.0 + config_use_bulk_thickness_flux = .true. + config_use_bulk_wind_stress = .true. +/ +&coupling + config_ssh_grad_relax_timescale = 86400.0 +/ +&shortwaveradiation + config_forcing_restart_file = 'Restart_forcing_time_stamp' + config_jerlov_water_type = 3 + config_surface_buoyancy_depth = 1 + config_sw_absorption_type = 'jerlov' +/ +&frazil_ice + config_frazil_fractional_thickness_limit = 0.1 + config_frazil_heat_of_fusion = 3.337e5 + config_frazil_ice_density = 1000.0 + config_frazil_in_open_ocean = .true. + config_frazil_land_ice_reference_salinity = 0.0 + config_frazil_maximum_depth = 100.0 + config_frazil_maximum_freezing_temperature = 0.0 + config_frazil_sea_ice_reference_salinity = 4.0 + config_frazil_under_land_ice = .true. + config_frazil_use_surface_pressure = .false. + config_specific_heat_sea_water = 3.996e3 + config_use_frazil_ice_formation = .true. 
+/ +&land_ice_fluxes + config_land_ice_flux_attenuation_coefficient = 10.0 + config_land_ice_flux_boundarylayerneighborweight = 0.0 + config_land_ice_flux_boundarylayerthickness = 10.0 + config_land_ice_flux_cp_ice = 2.009e3 + config_land_ice_flux_formulation = 'Jenkins' + config_land_ice_flux_isomip_gammat = 1e-4 + config_land_ice_flux_jenkins_heat_transfer_coefficient = 0.011 + config_land_ice_flux_jenkins_salt_transfer_coefficient = 3.1e-4 + config_land_ice_flux_mode = 'off' + config_land_ice_flux_rho_ice = 918 + config_land_ice_flux_rms_tidal_velocity = 5e-2 + config_land_ice_flux_topdragcoeff = 2.5e-3 + config_land_ice_flux_usehollandjenkinsadvdiff = .false. +/ +&advection + config_coef_3rd_order = 0.25 + config_horiz_tracer_adv_order = 3 + config_monotonic = .true. + config_vert_tracer_adv = 'stencil' + config_vert_tracer_adv_order = 3 +/ +&bottom_drag + config_bottom_drag_coeff = 1.0e-3 +/ +&ocean_constants + config_density0 = 1026.0 +/ +&pressure_gradient + config_common_level_weight = 0.5 + config_pressure_gradient_type = 'Jacobian_from_TS' +/ +&eos + config_eos_type = 'jm' + config_land_ice_cavity_freezing_temperature_coeff_0 = 6.22e-2 + config_land_ice_cavity_freezing_temperature_coeff_p = -7.43e-8 + config_land_ice_cavity_freezing_temperature_coeff_ps = -1.74e-10 + config_land_ice_cavity_freezing_temperature_coeff_s = -5.63e-2 + config_land_ice_cavity_freezing_temperature_reference_pressure = 0.0 + config_open_ocean_freezing_temperature_coeff_0 = -1.8 + config_open_ocean_freezing_temperature_coeff_p = 0.0 + config_open_ocean_freezing_temperature_coeff_ps = 0.0 + config_open_ocean_freezing_temperature_coeff_s = 0.0 + config_open_ocean_freezing_temperature_reference_pressure = 0.0 +/ +&eos_linear + config_eos_linear_alpha = 0.2 + config_eos_linear_beta = 0.8 + config_eos_linear_densityref = 1000.0 + config_eos_linear_sref = 35.0 + config_eos_linear_tref = 5.0 +/ +&split_explicit_ts + config_btr_dt = '0000_00:03:00' + config_btr_gam1_velwt1 = 0.5 + config_btr_gam2_sshwt1 = 1.0 + config_btr_gam3_velwt2 = 1.0 + config_btr_solve_ssh2 = .false. + config_btr_subcycle_loop_factor = 2 + config_n_bcl_iter_beg = 1 + config_n_bcl_iter_end = 2 + config_n_bcl_iter_mid = 2 + config_n_btr_cor_iter = 2 + config_n_ts_iter = 2 + config_vel_correction = .true. +/ +&testing + config_conduct_tests = .false. + config_tensor_test_function = 'sph_uCosCos' + config_test_tensors = .false. +/ +&debug + config_check_ssh_consistency = .true. + config_check_tracer_monotonicity = .false. + config_check_zlevel_consistency = .false. + config_disable_redi_horizontal_term1 = .false. + config_disable_redi_horizontal_term2 = .false. + config_disable_redi_horizontal_term3 = .false. + config_disable_redi_k33 = .false. + config_disable_thick_all_tend = .false. + config_disable_thick_hadv = .false. + config_disable_thick_sflux = .false. + config_disable_thick_vadv = .false. + config_disable_tr_adv = .false. + config_disable_tr_all_tend = .false. + config_disable_tr_hmix = .false. + config_disable_tr_nonlocalflux = .false. + config_disable_tr_sflux = .false. + config_disable_tr_vmix = .false. + config_disable_vel_all_tend = .false. + config_disable_vel_coriolis = .false. + config_disable_vel_hmix = .false. + config_disable_vel_pgrad = .false. + config_disable_vel_surface_stress = .false. + config_disable_vel_vadv = .false. + config_disable_vel_vmix = .false. + config_filter_btr_mode = .false. + config_include_ke_vertex = .false. + config_prescribe_thickness = .false. + config_prescribe_velocity = .false. 
+ config_read_nearest_restart = .false. +/ +&constrain_haney_number + config_rx1_horiz_smooth_open_ocean_cells = 20 + config_rx1_horiz_smooth_weight = 1.0 + config_rx1_init_inner_weight = 0.1 + config_rx1_inner_iter_count = 10 + config_rx1_max = 5.0 + config_rx1_min_layer_thickness = 1.0 + config_rx1_min_levels = 3 + config_rx1_outer_iter_count = 20 + config_rx1_slope_weight = 1e-1 + config_rx1_vert_smooth_weight = 1.0 + config_rx1_zstar_weight = 1.0 + config_use_rx1_constraint = .false. +/ +&baroclinic_channel + config_baroclinic_channel_bottom_depth = 1000.0 + config_baroclinic_channel_bottom_temperature = 10.1 + config_baroclinic_channel_coriolis_parameter = -1.2e-4 + config_baroclinic_channel_gradient_width_dist = 40e3 + config_baroclinic_channel_gradient_width_frac = 0.08 + config_baroclinic_channel_salinity = 35.0 + config_baroclinic_channel_surface_temperature = 13.1 + config_baroclinic_channel_temperature_difference = 1.2 + config_baroclinic_channel_use_distances = .false. + config_baroclinic_channel_vert_levels = 20 +/ +&lock_exchange + config_lock_exchange_bottom_depth = 20.0 + config_lock_exchange_cold_temperature = 5.0 + config_lock_exchange_direction = 'y' + config_lock_exchange_isopycnal_min_thickness = 0.01 + config_lock_exchange_layer_type = 'z-level' + config_lock_exchange_salinity = 35.0 + config_lock_exchange_vert_levels = 20 + config_lock_exchange_warm_temperature = 30.0 +/ +&internal_waves + config_internal_waves_amplitude_width_dist = 50e3 + config_internal_waves_amplitude_width_frac = 0.33 + config_internal_waves_bottom_depth = 500.0 + config_internal_waves_bottom_temperature = 10.1 + config_internal_waves_isopycnal_displacement = 125.0 + config_internal_waves_layer_type = 'z-level' + config_internal_waves_salinity = 35.0 + config_internal_waves_surface_temperature = 20.1 + config_internal_waves_temperature_difference = 2.0 + config_internal_waves_use_distances = false + config_internal_waves_vert_levels = 20 +/ +&overflow + config_overflow_bottom_depth = 2000.0 + config_overflow_domain_temperature = 20.0 + config_overflow_isopycnal_min_thickness = 0.01 + config_overflow_layer_type = 'z-level' + config_overflow_plug_temperature = 10.0 + config_overflow_plug_width_dist = 20e3 + config_overflow_plug_width_frac = 0.10 + config_overflow_ridge_depth = 500.0 + config_overflow_salinity = 35.0 + config_overflow_slope_center_dist = 40e3 + config_overflow_slope_center_frac = 0.20 + config_overflow_slope_width_dist = 7e3 + config_overflow_slope_width_frac = 0.05 + config_overflow_use_distances = false + config_overflow_vert_levels = 100 +/ +&global_ocean + config_global_ocean_chlorophyll_varname = 'none' + config_global_ocean_clearsky_varname = 'none' + config_global_ocean_cull_inland_seas = .true. + config_global_ocean_deepen_critical_passages = .true. + config_global_ocean_depress_by_land_ice = .false. + config_global_ocean_depth_conversion_factor = 1.0 + config_global_ocean_depth_dimname = 'none' + config_global_ocean_depth_file = 'none' + config_global_ocean_depth_varname = 'none' + config_global_ocean_ecosys_depth_conversion_factor = 1.0 + config_global_ocean_ecosys_depth_varname = 'none' + config_global_ocean_ecosys_file = 'unknown' + config_global_ocean_ecosys_forcing_file = 'unknown' + config_global_ocean_ecosys_forcing_time_dimname = 'none' + config_global_ocean_ecosys_lat_varname = 'none' + config_global_ocean_ecosys_latlon_degrees = .true. 
+ config_global_ocean_ecosys_lon_varname = 'none' + config_global_ocean_ecosys_method = 'bilinear_interpolation' + config_global_ocean_ecosys_ndepth_dimname = 'none' + config_global_ocean_ecosys_nlat_dimname = 'none' + config_global_ocean_ecosys_nlon_dimname = 'none' + config_global_ocean_ecosys_vert_levels = -1 + config_global_ocean_interior_restore_rate = 1.0e-7 + config_global_ocean_land_ice_topo_draft_varname = 'none' + config_global_ocean_land_ice_topo_file = 'none' + config_global_ocean_land_ice_topo_grounded_frac_varname = 'none' + config_global_ocean_land_ice_topo_ice_frac_varname = 'none' + config_global_ocean_land_ice_topo_lat_varname = 'none' + config_global_ocean_land_ice_topo_latlon_degrees = .true. + config_global_ocean_land_ice_topo_lon_varname = 'none' + config_global_ocean_land_ice_topo_nlat_dimname = 'none' + config_global_ocean_land_ice_topo_nlon_dimname = 'none' + config_global_ocean_land_ice_topo_thickness_varname = 'none' + config_global_ocean_minimum_depth = 15 + config_global_ocean_piston_velocity = 5.0e-5 + config_global_ocean_salinity_file = 'none' + config_global_ocean_salinity_varname = 'none' + config_global_ocean_smooth_ecosys_iterations = 0 + config_global_ocean_smooth_topography = .true. + config_global_ocean_smooth_ts_iterations = 0 + config_global_ocean_swdata_file = 'none' + config_global_ocean_swdata_lat_varname = 'none' + config_global_ocean_swdata_latlon_degrees = .true. + config_global_ocean_swdata_lon_varname = 'none' + config_global_ocean_swdata_method = 'bilinear_interpolation' + config_global_ocean_swdata_nlat_dimname = 'none' + config_global_ocean_swdata_nlon_dimname = 'none' + config_global_ocean_temperature_file = 'none' + config_global_ocean_temperature_varname = 'none' + config_global_ocean_topography_file = 'none' + config_global_ocean_topography_has_ocean_frac = .false. + config_global_ocean_topography_lat_varname = 'none' + config_global_ocean_topography_latlon_degrees = .true. + config_global_ocean_topography_lon_varname = 'none' + config_global_ocean_topography_method = 'bilinear_interpolation' + config_global_ocean_topography_nlat_dimname = 'none' + config_global_ocean_topography_nlon_dimname = 'none' + config_global_ocean_topography_ocean_frac_varname = 'none' + config_global_ocean_topography_varname = 'none' + config_global_ocean_tracer_depth_conversion_factor = 1.0 + config_global_ocean_tracer_depth_varname = 'none' + config_global_ocean_tracer_lat_varname = 'none' + config_global_ocean_tracer_latlon_degrees = .true. + config_global_ocean_tracer_lon_varname = 'none' + config_global_ocean_tracer_method = 'bilinear_interpolation' + config_global_ocean_tracer_ndepth_dimname = 'none' + config_global_ocean_tracer_nlat_dimname = 'none' + config_global_ocean_tracer_nlon_dimname = 'none' + config_global_ocean_tracer_vert_levels = -1 + config_global_ocean_windstress_conversion_factor = 1 + config_global_ocean_windstress_file = 'none' + config_global_ocean_windstress_lat_varname = 'none' + config_global_ocean_windstress_latlon_degrees = .true. 
+ config_global_ocean_windstress_lon_varname = 'none' + config_global_ocean_windstress_meridional_varname = 'none' + config_global_ocean_windstress_method = 'bilinear_interpolation' + config_global_ocean_windstress_nlat_dimname = 'none' + config_global_ocean_windstress_nlon_dimname = 'none' + config_global_ocean_windstress_zonal_varname = 'none' + config_global_ocean_zenithangle_varname = 'none' +/ +&cvmix_wswsbf + config_cvmix_wswsbf_bottom_depth = 400.0 + config_cvmix_wswsbf_coriolis_parameter = 1.0e-4 + config_cvmix_wswsbf_evaporation_flux = 0.0 + config_cvmix_wswsbf_interior_salinity_restoring_rate = 1.0e-6 + config_cvmix_wswsbf_interior_temperature_restoring_rate = 1.0e-6 + config_cvmix_wswsbf_latent_heat_flux = 0.0 + config_cvmix_wswsbf_max_windstress = 0.10 + config_cvmix_wswsbf_mixed_layer_depth_salinity = 0.0 + config_cvmix_wswsbf_mixed_layer_depth_temperature = 0.0 + config_cvmix_wswsbf_mixed_layer_salinity_change = 0.0 + config_cvmix_wswsbf_mixed_layer_temperature_change = 0.0 + config_cvmix_wswsbf_rain_flux = 0.0 + config_cvmix_wswsbf_salinity_gradient = 0.0 + config_cvmix_wswsbf_salinity_gradient_mixed_layer = 0.0 + config_cvmix_wswsbf_salinity_piston_velocity = 4.0e-6 + config_cvmix_wswsbf_sensible_heat_flux = 0.0 + config_cvmix_wswsbf_shortwave_heat_flux = 0.0 + config_cvmix_wswsbf_surface_restoring_salinity = 35.0 + config_cvmix_wswsbf_surface_restoring_temperature = 15.0 + config_cvmix_wswsbf_surface_salinity = 35.0 + config_cvmix_wswsbf_surface_temperature = 15.0 + config_cvmix_wswsbf_temperature_gradient = 0.01 + config_cvmix_wswsbf_temperature_gradient_mixed_layer = 0.0 + config_cvmix_wswsbf_temperature_piston_velocity = 4.0e-6 + config_cvmix_wswsbf_vert_levels = 100 + config_cvmix_wswsbf_vertical_grid = 'uniform' +/ +&iso + config_iso_acc_wind = 0.2 + config_iso_asf_wind = -0.05 + config_iso_cont_slope_flag = .true. + config_iso_depression_center_lon = 60 + config_iso_depression_depth = 800 + config_iso_depression_flag = .true. + config_iso_depression_north_lat = -65 + config_iso_depression_south_lat = -72 + config_iso_depression_width = 480000 + config_iso_embayment_center_lat = -71 + config_iso_embayment_center_lon = 60 + config_iso_embayment_depth = 2000 + config_iso_embayment_flag = .true. + config_iso_embayment_radius = 500000 + config_iso_heat_flux_lat_mn = -53 + config_iso_heat_flux_lat_sm = -65 + config_iso_heat_flux_lat_ss = -70 + config_iso_heat_flux_middle = 10 + config_iso_heat_flux_north = -5 + config_iso_heat_flux_region1 = -5 + config_iso_heat_flux_region1_flag = false + config_iso_heat_flux_region1_radius = 300000 + config_iso_heat_flux_region2 = -5 + config_iso_heat_flux_region2_flag = false + config_iso_heat_flux_region2_radius = 240000 + config_iso_heat_flux_south = -5 + config_iso_initial_temp_h0 = 1200 + config_iso_initial_temp_h1 = 500 + config_iso_initial_temp_latn = -50 + config_iso_initial_temp_lats = -75 + config_iso_initial_temp_mt = 0.000075 + config_iso_initial_temp_t1 = 3.5 + config_iso_initial_temp_t2 = 4.0 + config_iso_main_channel_depth = 4000.0 + config_iso_max_cont_slope = 0.01 + config_iso_north_wall_lat = -50 + config_iso_plateau_center_lat = -58 + config_iso_plateau_center_lon = 300 + config_iso_plateau_flag = .true. 
+ config_iso_plateau_height = 2000 + config_iso_plateau_radius = 200000 + config_iso_plateau_slope_width = 1000000 + config_iso_region1_center_lat = -75 + config_iso_region1_center_lon = 60 + config_iso_region2_center_lat = -71 + config_iso_region2_center_lon = 150 + config_iso_region3_center_lat = -71 + config_iso_region3_center_lon = 240 + config_iso_region4_center_lat = -71 + config_iso_region4_center_lon = 330 + config_iso_ridge_center_lon = 180 + config_iso_ridge_flag = .true. + config_iso_ridge_height = 2000.0 + config_iso_ridge_width = 2000000 + config_iso_salinity = 35.0 + config_iso_shelf_depth = 500 + config_iso_shelf_flag = .true. + config_iso_shelf_width = 120000 + config_iso_south_wall_lat = -70 + config_iso_surface_temperature_piston_velocity = 5.787e-5 + config_iso_temperature_restore_lcx1 = 600000 + config_iso_temperature_restore_lcx2 = 600000 + config_iso_temperature_restore_lcx3 = 600000 + config_iso_temperature_restore_lcx4 = 600000 + config_iso_temperature_restore_lcy1 = 600000 + config_iso_temperature_restore_lcy2 = 250000 + config_iso_temperature_restore_lcy3 = 250000 + config_iso_temperature_restore_lcy4 = 250000 + config_iso_temperature_restore_region1_flag = .true. + config_iso_temperature_restore_region2_flag = .true. + config_iso_temperature_restore_region3_flag = .true. + config_iso_temperature_restore_region4_flag = .true. + config_iso_temperature_restore_t1 = -1 + config_iso_temperature_restore_t2 = -1 + config_iso_temperature_restore_t3 = -1 + config_iso_temperature_restore_t4 = -1 + config_iso_temperature_sponge_h1 = 1000 + config_iso_temperature_sponge_l1 = 120000 + config_iso_temperature_sponge_t1 = 10 + config_iso_temperature_sponge_tau1 = 10.0 + config_iso_vert_levels = 100 + config_iso_wind_stress_max = 0.01 + config_iso_wind_trans = -65 +/ +&soma + config_soma_bottom_depth = 2500.0 + config_soma_center_latitude = 35.0 + config_soma_center_longitude = 0.0 + config_soma_density_difference = 4.0 + config_soma_density_difference_linear = 0.05 + config_soma_domain_width = 1.25e6 + config_soma_phi = 0.1 + config_soma_ref_density = 1000.0 + config_soma_restoring_temp_piston_vel = 1.0e-5 + config_soma_shelf_depth = 100.0 + config_soma_shelf_width = -0.4 + config_soma_surface_salinity = 33.0 + config_soma_surface_temp_restoring_at_center_latitude = 7.5 + config_soma_surface_temp_restoring_latitude_gradient = 0.5 + config_soma_surface_temperature = 20.0 + config_soma_thermocline_depth = 300.0 + config_soma_use_surface_temp_restoring = false + config_soma_vert_levels = 100 +/ +&ziso + config_ziso_add_easterly_wind_stress_asf = false + config_ziso_antarctic_shelf_front_width = 600000 + config_ziso_bottom_depth = 2500.0 + config_ziso_coriolis_gradient = 1e-11 + config_ziso_frazil_enable = false + config_ziso_frazil_temperature_anomaly = -3.0 + config_ziso_initial_temp_h1 = 300.0 + config_ziso_initial_temp_mt = 7.5e-5 + config_ziso_initial_temp_t1 = 6.0 + config_ziso_initial_temp_t2 = 3.6 + config_ziso_mean_restoring_temp = 3.0 + config_ziso_meridional_extent = 2.0e6 + config_ziso_reference_coriolis = -1e-4 + config_ziso_restoring_sponge_l = 8.0e4 + config_ziso_restoring_temp_dev_ta = 2.0 + config_ziso_restoring_temp_dev_tb = 2.0 + config_ziso_restoring_temp_piston_vel = 1.93e-5 + config_ziso_restoring_temp_tau = 30.0 + config_ziso_restoring_temp_ze = 1250.0 + config_ziso_shelf_depth = 500.0 + config_ziso_slope_center_position = 5.0e5 + config_ziso_slope_half_width = 1.0e5 + config_ziso_use_slopping_bathymetry = false + config_ziso_vert_levels = 100 + 
config_ziso_wind_stress_max = 0.2 + config_ziso_wind_stress_shelf_front_max = -0.05 + config_ziso_wind_transition_position = 800000.0 + config_ziso_zonal_extent = 1.0e6 +/ +&sub_ice_shelf_2d + config_sub_ice_shelf_2d_bottom_depth = 2000.0 + config_sub_ice_shelf_2d_bottom_salinity = 34.7 + config_sub_ice_shelf_2d_cavity_thickness = 25.0 + config_sub_ice_shelf_2d_edge_width = 15.0e3 + config_sub_ice_shelf_2d_slope_height = 500.0 + config_sub_ice_shelf_2d_surface_salinity = 34.5 + config_sub_ice_shelf_2d_temperature = 1.0 + config_sub_ice_shelf_2d_vert_levels = 20 + config_sub_ice_shelf_2d_y1 = 30.0e3 + config_sub_ice_shelf_2d_y2 = 60.0e3 +/ +&periodic_planar + config_periodic_planar_bottom_depth = 2500.0 + config_periodic_planar_velocity_strength = 1.0 + config_periodic_planar_vert_levels = 100 +/ +&ecosys_column + config_ecosys_column_bottom_depth = 6000.0 + config_ecosys_column_ecosys_filename = 'unknown' + config_ecosys_column_ts_filename = 'unknown' + config_ecosys_column_vert_levels = 100 + config_ecosys_column_vertical_grid = '100layerACMEv1' +/ +&sea_mount + config_sea_mount_bottom_depth = 5000.0 + config_sea_mount_coriolis_parameter = -1.0e-4 + config_sea_mount_density_alpha = 0.2 + config_sea_mount_density_coef_exp = 1028 + config_sea_mount_density_coef_linear = 1024 + config_sea_mount_density_depth_exp = 500 + config_sea_mount_density_depth_linear = 4500 + config_sea_mount_density_gradient_exp = 3.0 + config_sea_mount_density_gradient_linear = 0.1 + config_sea_mount_density_ref = 1028 + config_sea_mount_density_tref = 5.0 + config_sea_mount_height = 4500.0 + config_sea_mount_layer_type = 'sigma' + config_sea_mount_radius = 10.0e3 + config_sea_mount_salinity = 35.0 + config_sea_mount_stratification_type = 'exponential' + config_sea_mount_vert_levels = 10 + config_sea_mount_width = 40.0e3 +/ +&isomip + config_isomip_bottom_depth = -900.0 + config_isomip_coriolis_parameter = -1.4e-4 + config_isomip_eastern_boundary = 500e3 + config_isomip_ice_fraction1 = 1.0 + config_isomip_ice_fraction2 = 1.0 + config_isomip_ice_fraction3 = 1.0 + config_isomip_northern_boundary = 1000e3 + config_isomip_restoring_salinity = 34.4 + config_isomip_restoring_temperature = -1.9 + config_isomip_salinity = 34.4 + config_isomip_salinity_piston_velocity = 1.157e-5 + config_isomip_southern_boundary = 0.0 + config_isomip_temperature = -1.9 + config_isomip_temperature_piston_velocity = 1.157e-5 + config_isomip_vert_levels = 30 + config_isomip_vertical_level_distribution = 'constant' + config_isomip_western_boundary = 0.0 + config_isomip_y1 = 0.0 + config_isomip_y2 = 400e3 + config_isomip_y3 = 1000e3 + config_isomip_z1 = -700.0 + config_isomip_z2 = -200.0 + config_isomip_z3 = -200.0 +/ +&isomip_plus + config_isomip_plus_coriolis_parameter = -1.409e-4 + config_isomip_plus_effective_density = 1026. 
+ config_isomip_plus_init_bot_sal = 34.5 + config_isomip_plus_init_bot_temp = -1.9 + config_isomip_plus_init_top_sal = 33.8 + config_isomip_plus_init_top_temp = -1.9 + config_isomip_plus_max_bottom_depth = -720.0 + config_isomip_plus_min_column_thickness = 10.0 + config_isomip_plus_min_ocean_fraction = 0.5 + config_isomip_plus_minimum_levels = 3 + config_isomip_plus_restore_bot_sal = 34.7 + config_isomip_plus_restore_bot_temp = 1.0 + config_isomip_plus_restore_evap_rate = 200 + config_isomip_plus_restore_rate = 10.0 + config_isomip_plus_restore_top_sal = 33.8 + config_isomip_plus_restore_top_temp = -1.9 + config_isomip_plus_restore_xmax = 800.0e3 + config_isomip_plus_restore_xmin = 790.0e3 + config_isomip_plus_topography_file = 'input_geometry_processed.nc' + config_isomip_plus_vert_levels = 36 + config_isomip_plus_vertical_level_distribution = 'constant' +/ +&tracer_forcing_activetracers + config_salinity_restoring_constant_piston_velocity = 0.0 + config_salinity_restoring_max_difference = 0.5 + config_use_activetracers = .true. + config_use_activetracers_exponential_decay = .false. + config_use_activetracers_idealage_forcing = .false. + config_use_activetracers_interior_restoring = .false. + config_use_activetracers_surface_bulk_forcing = .true. + config_use_activetracers_surface_restoring = .false. + config_use_activetracers_ttd_forcing = .false. + config_use_surface_salinity_monthly_restoring = .false. +/ +&tracer_forcing_debugtracers + config_use_debugtracers = .false. + config_use_debugtracers_exponential_decay = .false. + config_use_debugtracers_idealage_forcing = .false. + config_use_debugtracers_interior_restoring = .false. + config_use_debugtracers_surface_bulk_forcing = .false. + config_use_debugtracers_surface_restoring = .false. + config_use_debugtracers_ttd_forcing = .false. +/ +&tracer_forcing_ecosystracers + config_use_ecosystracers = .false. + config_use_ecosystracers_exponential_decay = .false. + config_use_ecosystracers_idealage_forcing = .false. + config_use_ecosystracers_interior_restoring = .false. + config_use_ecosystracers_sea_ice_coupling = .false. + config_use_ecosystracers_surface_bulk_forcing = .false. + config_use_ecosystracers_surface_restoring = .false. + config_use_ecosystracers_surface_value = .false. + config_use_ecosystracers_ttd_forcing = .false. +/ +&tracer_forcing_dmstracers + config_use_dmstracers = .false. + config_use_dmstracers_exponential_decay = .false. + config_use_dmstracers_idealage_forcing = .false. + config_use_dmstracers_interior_restoring = .false. + config_use_dmstracers_sea_ice_coupling = .false. + config_use_dmstracers_surface_bulk_forcing = .false. + config_use_dmstracers_surface_restoring = .false. + config_use_dmstracers_surface_value = .false. + config_use_dmstracers_ttd_forcing = .false. +/ +&tracer_forcing_macromoleculestracers + config_use_macromoleculestracers = .false. + config_use_macromoleculestracers_exponential_decay = .false. + config_use_macromoleculestracers_idealage_forcing = .false. + config_use_macromoleculestracers_interior_restoring = .false. + config_use_macromoleculestracers_sea_ice_coupling = .false. + config_use_macromoleculestracers_surface_bulk_forcing = .false. + config_use_macromoleculestracers_surface_restoring = .false. + config_use_macromoleculestracers_surface_value = .false. + config_use_macromoleculestracers_ttd_forcing = .false. +/ +&am_globalstats + config_am_globalstats_compute_interval = 'output_interval' + config_am_globalstats_compute_on_startup = .true. 
+ config_am_globalstats_directory = 'analysis_members' + config_am_globalstats_enable = .true. + config_am_globalstats_output_stream = 'globalStatsOutput' + config_am_globalstats_text_file = .false. + config_am_globalstats_write_on_startup = .true. +/ +&am_surfaceareaweightedaverages + config_am_surfaceareaweightedaverages_compute_interval = '0000-00-00_01:00:00' + config_am_surfaceareaweightedaverages_compute_on_startup = .true. + config_am_surfaceareaweightedaverages_enable = .true. + config_am_surfaceareaweightedaverages_output_stream = 'surfaceAreaWeightedAveragesOutput' + config_am_surfaceareaweightedaverages_write_on_startup = .true. +/ +&am_watermasscensus + config_am_watermasscensus_compute_interval = '0000-00-00_01:00:00' + config_am_watermasscensus_compute_on_startup = .true. + config_am_watermasscensus_enable = .false. + config_am_watermasscensus_maxsalinity = 37.0 + config_am_watermasscensus_maxtemperature = 30.0 + config_am_watermasscensus_minsalinity = 32.0 + config_am_watermasscensus_mintemperature = -2.0 + config_am_watermasscensus_output_stream = 'waterMassCensusOutput' + config_am_watermasscensus_write_on_startup = .true. +/ +&am_layervolumeweightedaverage + config_am_layervolumeweightedaverage_compute_interval = '0000-00-00_01:00:00' + config_am_layervolumeweightedaverage_compute_on_startup = .true. + config_am_layervolumeweightedaverage_enable = .true. + config_am_layervolumeweightedaverage_output_stream = 'layerVolumeWeightedAverageOutput' + config_am_layervolumeweightedaverage_write_on_startup = .true. +/ +&am_zonalmean + config_am_zonalmean_compute_interval = '0000-00-00_01:00:00' + config_am_zonalmean_compute_on_startup = .true. + config_am_zonalmean_enable = .false. + config_am_zonalmean_max_bin = -1.0e34 + config_am_zonalmean_min_bin = -1.0e34 + config_am_zonalmean_num_bins = 180 + config_am_zonalmean_output_stream = 'zonalMeanOutput' + config_am_zonalmean_write_on_startup = .true. +/ +&am_okuboweiss + config_am_okuboweiss_compute_eddy_census = .true. + config_am_okuboweiss_compute_interval = '0000-00-00_01:00:00' + config_am_okuboweiss_compute_on_startup = .true. + config_am_okuboweiss_directory = 'analysis_members' + config_am_okuboweiss_eddy_min_cells = 20 + config_am_okuboweiss_enable = .false. + config_am_okuboweiss_lambda2_normalization = 1e-10 + config_am_okuboweiss_normalization = 1e-10 + config_am_okuboweiss_output_stream = 'okuboWeissOutput' + config_am_okuboweiss_threshold_value = -0.2 + config_am_okuboweiss_use_lat_lon_coords = .true. + config_am_okuboweiss_write_on_startup = .true. +/ +&am_meridionalheattransport + config_am_meridionalheattransport_compute_interval = '0000-00-00_01:00:00' + config_am_meridionalheattransport_compute_on_startup = .true. + config_am_meridionalheattransport_enable = .true. + config_am_meridionalheattransport_max_bin = -1.0e34 + config_am_meridionalheattransport_min_bin = -1.0e34 + config_am_meridionalheattransport_num_bins = 180 + config_am_meridionalheattransport_output_stream = 'meridionalHeatTransportOutput' + config_am_meridionalheattransport_region_group = '' + config_am_meridionalheattransport_write_on_startup = .true. +/ +&am_testcomputeinterval + config_am_testcomputeinterval_compute_interval = '00-00-01_00:00:00' + config_am_testcomputeinterval_compute_on_startup = .true. + config_am_testcomputeinterval_enable = .false. + config_am_testcomputeinterval_output_stream = 'testComputeIntervalOutput' + config_am_testcomputeinterval_write_on_startup = .true. 
+/ +&am_highfrequencyoutput + config_am_highfrequencyoutput_compute_interval = 'output_interval' + config_am_highfrequencyoutput_compute_on_startup = .false. + config_am_highfrequencyoutput_enable = .true. + config_am_highfrequencyoutput_output_stream = 'highFrequencyOutput' + config_am_highfrequencyoutput_write_on_startup = .false. +/ +&am_timefilters + config_am_timefilters_compute_cell_centered_values = .true. + config_am_timefilters_compute_interval = 'dt' + config_am_timefilters_compute_on_startup = .true. + config_am_timefilters_enable = .false. + config_am_timefilters_initialize_filters = .true. + config_am_timefilters_output_stream = 'timeFiltersOutput' + config_am_timefilters_restart_stream = 'timeFiltersRestart' + config_am_timefilters_tau = '90_00:00:00' + config_am_timefilters_write_on_startup = .true. +/ +&am_lagrparttrack + config_am_lagrparttrack_compute_interval = 'dt' + config_am_lagrparttrack_compute_on_startup = .false. + config_am_lagrparttrack_enable = .false. + config_am_lagrparttrack_filter_number = 0 + config_am_lagrparttrack_input_stream = 'lagrPartTrackInput' + config_am_lagrparttrack_output_stream = 'lagrPartTrackOutput' + config_am_lagrparttrack_region_stream = 'lagrPartTrackRegions' + config_am_lagrparttrack_reset_criteria = 'none' + config_am_lagrparttrack_reset_global_timestamp = '0000_00:00:00' + config_am_lagrparttrack_reset_if_inside_region = .false. + config_am_lagrparttrack_reset_if_outside_region = .false. + config_am_lagrparttrack_restart_stream = 'lagrPartTrackRestart' + config_am_lagrparttrack_write_on_startup = .true. +/ +&am_eliassenpalm + config_am_eliassenpalm_compute_interval = 'output_interval' + config_am_eliassenpalm_compute_on_startup = .true. + config_am_eliassenpalm_debug = .false. + config_am_eliassenpalm_enable = .false. + config_am_eliassenpalm_nbuoyancylayers = 45 + config_am_eliassenpalm_output_stream = 'eliassenPalmOutput' + config_am_eliassenpalm_restart_stream = 'eliassenPalmRestart' + config_am_eliassenpalm_rhomax_buoycoor = 1080 + config_am_eliassenpalm_rhomin_buoycoor = 900 + config_am_eliassenpalm_write_on_startup = .true. +/ +&am_mixedlayerdepths + config_am_mixedlayerdepths_compute_interval = '0000-00-00_01:00:00' + config_am_mixedlayerdepths_compute_on_startup = .true. + config_am_mixedlayerdepths_crit_dens_threshold = 0.03 + config_am_mixedlayerdepths_crit_temp_threshold = 0.2 + config_am_mixedlayerdepths_den_gradient_threshold = 5E-8 + config_am_mixedlayerdepths_dgradient = .true. + config_am_mixedlayerdepths_dthreshold = .true. + config_am_mixedlayerdepths_enable = .true. + config_am_mixedlayerdepths_interp_method = 1 + config_am_mixedlayerdepths_output_stream = 'mixedLayerDepthsOutput' + config_am_mixedlayerdepths_reference_pressure = 1.0E5 + config_am_mixedlayerdepths_temp_gradient_threshold = 5E-7 + config_am_mixedlayerdepths_tgradient = .true. + config_am_mixedlayerdepths_tthreshold = .true. + config_am_mixedlayerdepths_write_on_startup = .true. +/ +&am_regionalstatsdaily + config_am_regionalstatsdaily_1d_weighting_field = 'areaCell' + config_am_regionalstatsdaily_1d_weighting_function = 'mul' + config_am_regionalstatsdaily_2d_weighting_field = 'volumeCell' + config_am_regionalstatsdaily_2d_weighting_function = 'mul' + config_am_regionalstatsdaily_compute_interval = 'output_interval' + config_am_regionalstatsdaily_compute_on_startup = .false. + config_am_regionalstatsdaily_enable = .false. 
+ config_am_regionalstatsdaily_input_stream = 'regionalMasksInput' + config_am_regionalstatsdaily_operation = 'avg' + config_am_regionalstatsdaily_output_stream = 'regionalStatsDailyOutput' + config_am_regionalstatsdaily_region_group = 'all' + config_am_regionalstatsdaily_region_type = 'cell' + config_am_regionalstatsdaily_restart_stream = 'regionalMasksInput' + config_am_regionalstatsdaily_vertical_dimension = 'nVertLevels' + config_am_regionalstatsdaily_vertical_mask = 'cellMask' + config_am_regionalstatsdaily_write_on_startup = .false. +/ +&am_regionalstatsweekly + config_am_regionalstatsweekly_1d_weighting_field = 'areaCell' + config_am_regionalstatsweekly_1d_weighting_function = 'mul' + config_am_regionalstatsweekly_2d_weighting_field = 'volumeCell' + config_am_regionalstatsweekly_2d_weighting_function = 'mul' + config_am_regionalstatsweekly_compute_interval = 'output_interval' + config_am_regionalstatsweekly_compute_on_startup = .false. + config_am_regionalstatsweekly_enable = .false. + config_am_regionalstatsweekly_input_stream = 'regionalMasksInput' + config_am_regionalstatsweekly_operation = 'avg' + config_am_regionalstatsweekly_output_stream = 'regionalStatsWeeklyOutput' + config_am_regionalstatsweekly_region_group = 'all' + config_am_regionalstatsweekly_region_type = 'cell' + config_am_regionalstatsweekly_restart_stream = 'regionalMasksInput' + config_am_regionalstatsweekly_vertical_dimension = 'nVertLevels' + config_am_regionalstatsweekly_vertical_mask = 'cellMask' + config_am_regionalstatsweekly_write_on_startup = .false. +/ +&am_regionalstatsmonthly + config_am_regionalstatsmonthly_1d_weighting_field = 'areaCell' + config_am_regionalstatsmonthly_1d_weighting_function = 'mul' + config_am_regionalstatsmonthly_2d_weighting_field = 'volumeCell' + config_am_regionalstatsmonthly_2d_weighting_function = 'mul' + config_am_regionalstatsmonthly_compute_interval = 'output_interval' + config_am_regionalstatsmonthly_compute_on_startup = .false. + config_am_regionalstatsmonthly_enable = .false. + config_am_regionalstatsmonthly_input_stream = 'regionalMasksInput' + config_am_regionalstatsmonthly_operation = 'avg' + config_am_regionalstatsmonthly_output_stream = 'regionalStatsMonthlyOutput' + config_am_regionalstatsmonthly_region_group = 'all' + config_am_regionalstatsmonthly_region_type = 'cell' + config_am_regionalstatsmonthly_restart_stream = 'regionalMasksInput' + config_am_regionalstatsmonthly_vertical_dimension = 'nVertLevels' + config_am_regionalstatsmonthly_vertical_mask = 'cellMask' + config_am_regionalstatsmonthly_write_on_startup = .false. +/ +&am_regionalstatscustom + config_am_regionalstatscustom_1d_weighting_field = 'areaCell' + config_am_regionalstatscustom_1d_weighting_function = 'mul' + config_am_regionalstatscustom_2d_weighting_field = 'volumeCell' + config_am_regionalstatscustom_2d_weighting_function = 'mul' + config_am_regionalstatscustom_compute_interval = 'output_interval' + config_am_regionalstatscustom_compute_on_startup = .false. + config_am_regionalstatscustom_enable = .false. 
+ config_am_regionalstatscustom_input_stream = 'regionalMasksInput' + config_am_regionalstatscustom_operation = 'avg' + config_am_regionalstatscustom_output_stream = 'regionalStatsCustomOutput' + config_am_regionalstatscustom_region_group = 'all' + config_am_regionalstatscustom_region_type = 'cell' + config_am_regionalstatscustom_restart_stream = 'regionalMasksInput' + config_am_regionalstatscustom_vertical_dimension = 'nVertLevels' + config_am_regionalstatscustom_vertical_mask = 'cellMask' + config_am_regionalstatscustom_write_on_startup = .false. +/ +&am_timeseriesstatsdaily + config_am_timeseriesstatsdaily_backward_output_offset = '00-00-01_00:00:00' + config_am_timeseriesstatsdaily_compute_interval = '00-00-00_01:00:00' + config_am_timeseriesstatsdaily_compute_on_startup = .false. + config_am_timeseriesstatsdaily_duration_intervals = 'repeat_interval' + config_am_timeseriesstatsdaily_enable = .false. + config_am_timeseriesstatsdaily_operation = 'avg' + config_am_timeseriesstatsdaily_output_stream = 'timeSeriesStatsDailyOutput' + config_am_timeseriesstatsdaily_reference_times = 'initial_time' + config_am_timeseriesstatsdaily_repeat_intervals = 'reset_interval' + config_am_timeseriesstatsdaily_reset_intervals = '00-00-01_00:00:00' + config_am_timeseriesstatsdaily_restart_stream = 'timeSeriesStatsDailyRestart' + config_am_timeseriesstatsdaily_write_on_startup = .false. +/ +&am_timeseriesstatsmonthly + config_am_timeseriesstatsmonthly_backward_output_offset = '00-01-00_00:00:00' + config_am_timeseriesstatsmonthly_compute_interval = '00-00-00_01:00:00' + config_am_timeseriesstatsmonthly_compute_on_startup = .false. + config_am_timeseriesstatsmonthly_duration_intervals = 'repeat_interval' + config_am_timeseriesstatsmonthly_enable = .true. + config_am_timeseriesstatsmonthly_operation = 'avg' + config_am_timeseriesstatsmonthly_output_stream = 'timeSeriesStatsMonthlyOutput' + config_am_timeseriesstatsmonthly_reference_times = 'initial_time' + config_am_timeseriesstatsmonthly_repeat_intervals = 'reset_interval' + config_am_timeseriesstatsmonthly_reset_intervals = '00-01-00_00:00:00' + config_am_timeseriesstatsmonthly_restart_stream = 'timeSeriesStatsMonthlyRestart' + config_am_timeseriesstatsmonthly_write_on_startup = .false. +/ +&am_timeseriesstatsclimatology + config_am_timeseriesstatsclimatology_backward_output_offset = '00-03-00_00:00:00' + config_am_timeseriesstatsclimatology_compute_interval = '00-00-00_01:00:00' + config_am_timeseriesstatsclimatology_compute_on_startup = .false. + config_am_timeseriesstatsclimatology_duration_intervals = '00-03-00_00:00:00;00-03-00_00:00:00;00-03-00_00:00:00;00-03-00_00:00:00' + config_am_timeseriesstatsclimatology_enable = .false. + config_am_timeseriesstatsclimatology_operation = 'avg' + config_am_timeseriesstatsclimatology_output_stream = 'timeSeriesStatsClimatologyOutput' + config_am_timeseriesstatsclimatology_reference_times = '00-03-01_00:00:00;00-06-01_00:00:00;00-09-01_00:00:00;00-12-01_00:00:00' + config_am_timeseriesstatsclimatology_repeat_intervals = '01-00-00_00:00:00;01-00-00_00:00:00;01-00-00_00:00:00;01-00-00_00:00:00' + config_am_timeseriesstatsclimatology_reset_intervals = '1000-00-00_00:00:00;1000-00-00_00:00:00;1000-00-00_00:00:00;1000-00-00_00:00:00' + config_am_timeseriesstatsclimatology_restart_stream = 'timeSeriesStatsClimatologyRestart' + config_am_timeseriesstatsclimatology_write_on_startup = .false. 
+/ +&am_timeseriesstatscustom + config_am_timeseriesstatscustom_backward_output_offset = '00-00-01_00:00:00' + config_am_timeseriesstatscustom_compute_interval = '00-00-00_01:00:00' + config_am_timeseriesstatscustom_compute_on_startup = .false. + config_am_timeseriesstatscustom_duration_intervals = 'repeat_interval' + config_am_timeseriesstatscustom_enable = .false. + config_am_timeseriesstatscustom_operation = 'avg' + config_am_timeseriesstatscustom_output_stream = 'timeSeriesStatsCustomOutput' + config_am_timeseriesstatscustom_reference_times = 'initial_time' + config_am_timeseriesstatscustom_repeat_intervals = 'reset_interval' + config_am_timeseriesstatscustom_reset_intervals = '00-00-07_00:00:00' + config_am_timeseriesstatscustom_restart_stream = 'timeSeriesStatsCustomRestart' + config_am_timeseriesstatscustom_write_on_startup = .false. +/ +&am_pointwisestats + config_am_pointwisestats_compute_interval = 'output_interval' + config_am_pointwisestats_compute_on_startup = .true. + config_am_pointwisestats_enable = .false. + config_am_pointwisestats_output_stream = 'pointwiseStatsOutput' + config_am_pointwisestats_write_on_startup = .true. +/ +&am_debugdiagnostics + config_am_debugdiagnostics_check_state = .true. + config_am_debugdiagnostics_compute_interval = 'dt' + config_am_debugdiagnostics_compute_on_startup = .true. + config_am_debugdiagnostics_enable = .false. + config_am_debugdiagnostics_output_stream = 'debugDiagnosticsOutput' + config_am_debugdiagnostics_write_on_startup = .false. +/ +&am_rpncalculator + config_am_rpncalculator_compute_interval = '0010-00-00_00:00:00' + config_am_rpncalculator_compute_on_startup = .true. + config_am_rpncalculator_enable = .false. + config_am_rpncalculator_expression_1 = 'a b *' + config_am_rpncalculator_expression_2 = 'none' + config_am_rpncalculator_expression_3 = 'none' + config_am_rpncalculator_expression_4 = 'none' + config_am_rpncalculator_output_name_1 = 'volumeCell' + config_am_rpncalculator_output_name_2 = 'none' + config_am_rpncalculator_output_name_3 = 'none' + config_am_rpncalculator_output_name_4 = 'none' + config_am_rpncalculator_output_stream = 'none' + config_am_rpncalculator_variable_a = 'layerThickness' + config_am_rpncalculator_variable_b = 'areaCell' + config_am_rpncalculator_variable_c = 'none' + config_am_rpncalculator_variable_d = 'none' + config_am_rpncalculator_variable_e = 'none' + config_am_rpncalculator_variable_f = 'none' + config_am_rpncalculator_variable_g = 'none' + config_am_rpncalculator_variable_h = 'none' + config_am_rpncalculator_write_on_startup = .false. +/ +&am_transecttransport + config_am_transecttransport_compute_interval = 'output_interval' + config_am_transecttransport_compute_on_startup = .true. + config_am_transecttransport_enable = .false. + config_am_transecttransport_output_stream = 'transectTransportOutput' + config_am_transecttransport_transect_group = 'all' + config_am_transecttransport_write_on_startup = .true. +/ +&am_eddyproductvariables + config_am_eddyproductvariables_compute_interval = 'dt' + config_am_eddyproductvariables_compute_on_startup = .true. + config_am_eddyproductvariables_enable = .false. + config_am_eddyproductvariables_output_stream = 'eddyProductVariablesOutput' + config_am_eddyproductvariables_write_on_startup = .false. +/ +&am_mocstreamfunction + config_am_mocstreamfunction_compute_interval = 'output_interval' + config_am_mocstreamfunction_compute_on_startup = .true. + config_am_mocstreamfunction_enable = .false. 
+ config_am_mocstreamfunction_max_bin = -1.0e34 + config_am_mocstreamfunction_min_bin = -1.0e34 + config_am_mocstreamfunction_normal_velocity_value = 'normalVelocity' + config_am_mocstreamfunction_num_bins = 180 + config_am_mocstreamfunction_output_stream = 'mocStreamfunctionOutput' + config_am_mocstreamfunction_region_group = 'all' + config_am_mocstreamfunction_transect_group = 'all' + config_am_mocstreamfunction_vertical_velocity_value = 'vertVelocityTop' + config_am_mocstreamfunction_write_on_startup = .true. +/ diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-01-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-01-01.nc new file mode 100644 index 000000000..ea7b0009a Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-01-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-02-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-02-01.nc new file mode 100644 index 000000000..cd9204659 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-02-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-03-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-03-01.nc new file mode 100644 index 000000000..59d56ff94 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-03-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-04-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-04-01.nc new file mode 100644 index 000000000..bbd37e5cf Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-04-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-05-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-05-01.nc new file mode 100644 index 000000000..32318fe24 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-05-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-06-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-06-01.nc new file mode 100644 index 000000000..f32e7cbbb Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-06-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-07-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-07-01.nc new file mode 100644 index 000000000..11ad6b892 Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-07-01.nc differ diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-08-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-08-01.nc new file mode 100644 index 000000000..4e004c40b Binary files /dev/null and 
b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-08-01.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-09-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-09-01.nc
new file mode 100644
index 000000000..8fdbfa95d
Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-09-01.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-10-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-10-01.nc
new file mode 100644
index 000000000..abd3e682d
Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-10-01.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-11-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-11-01.nc
new file mode 100644
index 000000000..2c9c07374
Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-11-01.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-12-01.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-12-01.nc
new file mode 100644
index 000000000..8487ca9c3
Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.hist.am.timeSeriesStatsMonthly.0002-12-01.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology_task/mpaso.rst.0001-01-06_00000.nc b/mpas_analysis/test/test_mpas_climatology_task/mpaso.rst.0001-01-06_00000.nc
new file mode 100644
index 000000000..0d2ef0d14
Binary files /dev/null and b/mpas_analysis/test/test_mpas_climatology_task/mpaso.rst.0001-01-06_00000.nc differ
diff --git a/mpas_analysis/test/test_mpas_climatology_task/streams.ocean b/mpas_analysis/test/test_mpas_climatology_task/streams.ocean
new file mode 100644
index 000000000..1a025681b
--- /dev/null
+++ b/mpas_analysis/test/test_mpas_climatology_task/streams.ocean
@@ -0,0 +1,604 @@
[604 added lines of XML stream definitions for the oQU240 test case; the tag content was lost in extraction and only the leading + of each line survives]
diff --git a/run_mpas_analysis b/run_mpas_analysis
index e2fac1b9b..a884e75d4 100755
--- a/run_mpas_analysis
+++ b/run_mpas_analysis
@@ -16,6 +16,7 @@ import sys
 import pkg_resources
 import shutil
 import os
+from collections import OrderedDict
 
 from mpas_analysis.configuration import MpasAnalysisConfigParser
 
@@ -27,130 +28,6 @@ from mpas_analysis.shared.html import generate_html
 from mpas_analysis.shared import AnalysisTask
 
 
-def update_generate(config, generate):  # {{{
-    """
-    Update the 'generate' config
option using a string from the command line. - - Parameters - ---------- - config : ``MpasAnalysisConfigParser`` object - contains config options - - generate : str - a comma-separated string of generate flags: either names of analysis - tasks or commands of the form ``all_`` or ``no_`` indicating - that analysis with a given tag should be included or excluded). - - Authors - ------- - Xylar Asay-Davis - """ - - # overwrite the 'generate' in config with a string that parses to - # a list of string - generateList = generate.split(',') - generateString = ', '.join(["'{}'".format(element) - for element in generateList]) - generateString = '[{}]'.format(generateString) - config.set('output', 'generate', generateString) # }}} - - -def run_parallel_tasks(config, analyses, taskCount): - # {{{ - """ - Launch new processes for parallel tasks, allowing up to ``taskCount`` - tasks to run at once. - - Parameters - ---------- - config : ``MpasAnalysisConfigParser`` object - contains config options - - analyses : list of ``AnalysisTask`` objects - A list of analysis tasks to run - - taskCount : int - The maximum number of tasks that are allowed to run at once - - Authors - ------- - Xylar Asay-Davis - """ - - taskCount = min(taskCount, len(analyses)) - - runningTasks = {} - for analysisTask in analyses[0:taskCount]: - print 'Running {}'.format(analysisTask.taskName) - analysisTask.start() - runningTasks[analysisTask.taskName] = analysisTask - - remainingTasks = analyses[taskCount:] - tasksWithErrors = [] - while len(runningTasks.keys()) > 0: - analysisTask = wait_for_task(runningTasks) - taskName = analysisTask.taskName - if analysisTask._runStatus.value == AnalysisTask.SUCCESS: - print " Task {} has finished successfully.".format(taskName) - elif analysisTask._runStatus.value == AnalysisTask.FAIL: - print "ERROR in task {}. See log file {} for details".format( - taskName, analysisTask._logFileName) - tasksWithErrors.append(taskName) - else: - print "Unexpected status from in task {}. This may be a " \ - "bug.".format(taskName) - # remove the process from the process dictionary (no need to bother) - runningTasks.pop(taskName) - - if len(remainingTasks) > 0: - analysisTask = remainingTasks[0] - remainingTasks = remainingTasks[1:] - - print 'Running {}'.format(analysisTask.taskName) - analysisTask.start() - runningTasks[analysisTask.taskName] = analysisTask - - # raise the last exception so the process exits with an error - errorCount = len(tasksWithErrors) - if errorCount == 1: - print "There were errors in task {}".format(tasksWithErrors[0]) - sys.exit(1) - elif errorCount > 0: - print "There were errors in {} tasks: {}".format( - errorCount, ', '.join(tasksWithErrors)) - sys.exit(1) - # }}} - - -def wait_for_task(runningTasks, timeout=0.1): # {{{ - """ - Build a list of analysis modules based on the 'generate' config option. - New tasks should be added here, following the approach used for existing - analysis tasks. - - Parameters - ---------- - runningTasks : dict of ``AnalysisTasks`` - The tasks that are currently running, with task names as keys - - Returns - ------- - analysisTask : ``AnalysisTasks`` - A task that finished - - Authors - ------- - Xylar Asay-Davis - """ - # necessary to have a timeout so we can kill the whole thing - # with a keyboard interrupt - while True: - for analysisTask in runningTasks.itervalues(): - analysisTask.join(timeout=timeout) - if not analysisTask.is_alive(): - return analysisTask # }}} - - def build_analysis_list(config): # {{{ """ Build a list of analysis tasks. 
New tasks should be added here, following
@@ -179,26 +56,40 @@ def build_analysis_list(config): # {{{
# analysis can only be imported after the right MPL renderer is selected
from mpas_analysis import ocean
from mpas_analysis import sea_ice
+ from mpas_analysis.shared.climatology import MpasClimatologyTask
# analyses will be a list of analysis classes
analyses = []
# Ocean Analyses
-
- analyses.append(ocean.ClimatologyMapMLD(config))
- analyses.append(ocean.ClimatologyMapSST(config))
- analyses.append(ocean.ClimatologyMapSSS(config))
+ oceanClimatologyTask = MpasClimatologyTask(config=config,
+ componentName='ocean')
+ analyses.append(oceanClimatologyTask)
+ analyses.append(ocean.ClimatologyMapMLD(config, oceanClimatologyTask))
+ analyses.append(ocean.ClimatologyMapSST(config, oceanClimatologyTask))
+ analyses.append(ocean.ClimatologyMapSSS(config, oceanClimatologyTask))
analyses.append(ocean.TimeSeriesOHC(config))
analyses.append(ocean.TimeSeriesSST(config))
- analyses.append(ocean.MeridionalHeatTransport(config))
- analyses.append(ocean.StreamfunctionMOC(config))
+ analyses.append(ocean.MeridionalHeatTransport(config, oceanClimatologyTask))
+ analyses.append(ocean.StreamfunctionMOC(config, oceanClimatologyTask))
analyses.append(ocean.IndexNino34(config))
# Sea Ice Analyses
- analyses.append(sea_ice.ClimatologyMapSeaIceConc(config, hemisphere='NH'))
- analyses.append(sea_ice.ClimatologyMapSeaIceThick(config, hemisphere='NH'))
- analyses.append(sea_ice.ClimatologyMapSeaIceConc(config, hemisphere='SH'))
- analyses.append(sea_ice.ClimatologyMapSeaIceThick(config, hemisphere='SH'))
+ seaIceClimatologyTask = MpasClimatologyTask(config=config,
+ componentName='seaIce')
+ analyses.append(seaIceClimatologyTask)
+ analyses.append(sea_ice.ClimatologyMapSeaIceConc(config,
+ seaIceClimatologyTask,
+ hemisphere='NH'))
+ analyses.append(sea_ice.ClimatologyMapSeaIceThick(config,
+ seaIceClimatologyTask,
+ hemisphere='NH'))
+ analyses.append(sea_ice.ClimatologyMapSeaIceConc(config,
+ seaIceClimatologyTask,
+ hemisphere='SH'))
+ analyses.append(sea_ice.ClimatologyMapSeaIceThick(config,
+ seaIceClimatologyTask,
+ hemisphere='SH'))
analyses.append(sea_ice.TimeSeriesSeaIce(config))
return analyses # }}}
@@ -226,80 +117,269 @@ def determine_analyses_to_generate(analyses): # {{{
Xylar Asay-Davis
"""
+ analysesToGenerate = OrderedDict()
# check which analysis we actually want to generate and only keep those
- analysesToGenerate = []
for analysisTask in analyses:
- # for each anlaysis module, check if we want to generate this task
- # and if the analysis task has a valid configuration
- if analysisTask.check_generate():
- add = False
- try:
- analysisTask.setup_and_check()
- add = True
- except (Exception, BaseException):
- traceback.print_exc(file=sys.stdout)
- print "ERROR: analysis module {} failed during check and " \
- "will not be run".format(analysisTask.taskName)
- if add:
- analysesToGenerate.append(analysisTask)
+ # update the dictionary with this task and perhaps its subtasks
+ add_task_and_subtasks(analysisTask, analysesToGenerate)
return analysesToGenerate # }}}
-def run_analysis(config, analyses): # {{{
+def add_task_and_subtasks(analysisTask, analysesToGenerate,
+ callCheckGenerate=True):
+ # {{{
"""
- Run one or more analysis tasks
+ If a task has been requested through the generate config option or
+ if it is a prerequisite of a requested task, add it to the dictionary of
+ tasks to generate.
+
+ Parameters
+ ----------
+ analysisTask : ``AnalysisTask``
+ A task to be added
+
+ analysesToGenerate : ``OrderedDict`` of ``AnalysisTask``
+ The dictionary of analysis tasks to be generated, which this call may
+ update to include this task and its subtasks
+
+ callCheckGenerate : bool
+ Whether the ``check_generate`` method should be called for this task to
+ see if it has been requested. We skip this for subtasks and
+ prerequisites, since they are needed by another task regardless of
+ whether the user specifically requested them.
+
+ Authors
+ -------
+ Xylar Asay-Davis
+ """
+
+ key = (analysisTask.taskName, analysisTask.subtaskName)
+ if key in analysesToGenerate.keys():
+ # The task was already added
+ assert(analysisTask._setupStatus == 'success')
+ return
+
+ # for each analysis task, check if we want to generate this task
+ # and if the analysis task has a valid configuration
+ taskTitle = analysisTask.printTaskName
+ if callCheckGenerate and not analysisTask.check_generate():
+ # we don't need to add this task -- it wasn't requested
+ return
+
+ # first, we should try to add the prerequisites of this task and its
+ # subtasks (if they aren't also subtasks for this task)
+ prereqs = list(analysisTask.runAfterTasks)
+ for subtask in analysisTask.subtasks:
+ for prereq in subtask.runAfterTasks:
+ if prereq not in analysisTask.subtasks:
+ prereqs.append(prereq)
+
+ for prereq in prereqs:
+ add_task_and_subtasks(prereq, analysesToGenerate,
+ callCheckGenerate=False)
+ if prereq._setupStatus != 'success':
+ # a prereq failed setup_and_check
+ print "ERROR: prerequisite task {} of analysis task {}" \
+ " failed during check,\n" \
+ " so this task will not be run".format(
+ prereq.printTaskName, taskTitle)
+ analysisTask._setupStatus = 'fail'
+ return
+
+ # make sure all prereqs have been set up successfully before trying to
+ # set up this task -- this task's setup may depend on setup in the prereqs
+ try:
+ analysisTask.setup_and_check()
+ except (Exception, BaseException):
+ traceback.print_exc(file=sys.stdout)
+ print "ERROR: analysis task {} failed during check and " \
+ "will not be run".format(taskTitle)
+ analysisTask._setupStatus = 'fail'
+ return
+
+ # next, we should try to add the subtasks. This is done after the current
+ # analysis task has been set up in case subtasks depend on information
+ # from the parent task
+ for subtask in analysisTask.subtasks:
+ add_task_and_subtasks(subtask, analysesToGenerate,
+ callCheckGenerate=False)
+ if subtask._setupStatus != 'success':
+ # a subtask failed setup_and_check
+ print "ERROR: a subtask of analysis task {}" \
+ " failed during check,\n" \
+ " so this task will not be run".format(taskTitle)
+ analysisTask._setupStatus = 'fail'
+ return
+
+ analysesToGenerate[key] = analysisTask
+ analysisTask._setupStatus = 'success'
+ # }}}
+
+
+def update_generate(config, generate): # {{{
+ """
+ Update the 'generate' config option using a string from the command line.
Parameters
----------
config : ``MpasAnalysisConfigParser`` object
contains config options
- analyses : list of ``AnalysisTask`` objects
- A list of analysis tasks to run
+ generate : str
+ a comma-separated string of generate flags: either names of analysis
+ tasks or commands of the form ``all_`` or ``no_`` indicating
+ that analysis with a given tag should be included or excluded.
+
+ Authors
+ -------
+ Xylar Asay-Davis
+ """
+
+ # overwrite the 'generate' in config with a string that parses to
+ # a list of strings
+ generateList = generate.split(',')
+ generateString = ', '.join(["'{}'".format(element)
+ for element in generateList])
+ generateString = '[{}]'.format(generateString)
+ config.set('output', 'generate', generateString) # }}}
+
+
+def run_analysis(config, analyses): # {{{
+ """
+ Run all the tasks, either in serial or in parallel
+
+ Parameters
+ ----------
+ config : ``MpasAnalysisConfigParser`` object
contains config options
- analyses : list of ``AnalysisTask`` objects
- A list of analysis tasks to run
+ analyses : OrderedDict of ``AnalysisTask`` objects
+ A dictionary of analysis tasks to run with (task, subtask) names as
+ keys
- Raises
- ------
- Exception:
- If one or more tasks raise exceptions, re-raises the last exception
- after all tasks have completed to indicate that there was a problem
Authors
-------
Xylar Asay-Davis
"""
- # run each analysis task
+ taskCount = config.getWithDefault('execute', 'parallelTaskCount',
+ default=1)
+
+ isParallel = taskCount > 1 and len(analyses) > 1
+
+ for analysisTask in analyses.itervalues():
+ if not analysisTask.runAfterTasks and not analysisTask.subtasks:
+ analysisTask._runStatus.value = AnalysisTask.READY
+ else:
+ analysisTask._runStatus.value = AnalysisTask.BLOCKED
tasksWithErrors = []
- lastStacktrace = None
- for analysisTask in analyses:
- analysisTask.run(writeLogFile=False)
- if analysisTask._runStatus.value == AnalysisTask.FAIL:
- lastStacktrace = analysisTask._stackTrace
- tasksWithErrors.append(analysisTask.taskName)
+ runningTasks = {}
- if config.getboolean('plot', 'displayToScreen'):
+ # run each analysis task
+ while True:
+ # we still have tasks to run
+ for analysisTask in analyses.itervalues():
+ if analysisTask._runStatus.value == AnalysisTask.BLOCKED:
+ prereqs = analysisTask.runAfterTasks + analysisTask.subtasks
+ prereqStatus = [prereq._runStatus.value for prereq in prereqs]
+ if any([runStatus == AnalysisTask.FAIL for runStatus in
+ prereqStatus]):
+ # a prerequisite failed so this task cannot succeed
+ analysisTask._runStatus.value = AnalysisTask.FAIL
+ if all([runStatus == AnalysisTask.SUCCESS for runStatus in
+ prereqStatus]):
+ # no unfinished prerequisites so we can run this task
+ analysisTask._runStatus.value = AnalysisTask.READY
+
+ unfinishedCount = 0
+ for analysisTask in analyses.itervalues():
+ if analysisTask._runStatus.value not in [AnalysisTask.SUCCESS,
+ AnalysisTask.FAIL]:
+ unfinishedCount += 1
+
+ if unfinishedCount <= 0:
+ # we're done
+ break
+
+ # launch new tasks
+ for key, analysisTask in analyses.items():
+ if analysisTask._runStatus.value == AnalysisTask.READY:
+ if isParallel:
+ print 'Running {}'.format(analysisTask.printTaskName)
+ analysisTask._runStatus.value = AnalysisTask.RUNNING
+ analysisTask.start()
+ runningTasks[key] = analysisTask
+ if len(runningTasks.keys()) >= taskCount:
+ break
+ else:
+ analysisTask.run(writeLogFile=False)
+
+ if isParallel:
+ # wait for a task to finish
+ analysisTask = wait_for_task(runningTasks)
+ key = (analysisTask.taskName, analysisTask.subtaskName)
+ runningTasks.pop(key)
+
+ taskTitle = analysisTask.printTaskName
+
+ if analysisTask._runStatus.value == AnalysisTask.SUCCESS:
+ print "  Task {} has finished successfully.".format(taskTitle)
+ elif analysisTask._runStatus.value == AnalysisTask.FAIL:
+ print "ERROR in task {}. See log file {} for details".format(
+ taskTitle, analysisTask._logFileName)
+ tasksWithErrors.append(taskTitle)
+ else:
+ print "Unexpected status for task {}.
This may be a " \
+ "bug.".format(taskTitle)
+
+ if not isParallel and config.getboolean('plot', 'displayToScreen'):
import matplotlib.pyplot as plt
plt.show()
- # See if there were errors; exit(1) if so
+ # exit with an error code if any tasks failed
errorCount = len(tasksWithErrors)
if errorCount == 1:
- if len(analyses) > 1:
- print "There were errors in task {}".format(tasksWithErrors[0])
- print "The stacktrace was:"
- print lastStacktrace
+ print "There were errors in task {}".format(tasksWithErrors[0])
sys.exit(1)
elif errorCount > 0:
print "There were errors in {} tasks: {}".format(
errorCount, ', '.join(tasksWithErrors))
- print "The last stacktrace was:"
- print lastStacktrace
sys.exit(1)
- # }}}
+def wait_for_task(runningTasks, timeout=0.1): # {{{
+ """
+ Wait for the next running analysis task to finish and return it.
+
+ Parameters
+ ----------
+ runningTasks : dict of ``AnalysisTasks``
+ The tasks that are currently running, with task names as keys
+
+ Returns
+ -------
+ analysisTask : ``AnalysisTasks``
+ A task that finished
+
+ Authors
+ -------
+ Xylar Asay-Davis
+ """
+ # necessary to have a timeout so we can kill the whole thing
+ # with a keyboard interrupt
+ while True:
+ for analysisTask in runningTasks.itervalues():
+ analysisTask.join(timeout=timeout)
+ if not analysisTask.is_alive():
+ return analysisTask # }}}
+
+
if __name__ == "__main__":
parser = argparse.ArgumentParser(
@@ -365,14 +445,8 @@ if __name__ == "__main__":
analyses = build_analysis_list(config)
analyses = determine_analyses_to_generate(analyses)
- parallelTaskCount = config.getWithDefault('execute', 'parallelTaskCount',
- default=1)
- if not args.setup_only and not args.html_only:
- if parallelTaskCount <= 1 or len(analyses) == 1:
- run_analysis(config, analyses)
- else:
- run_parallel_tasks(config, analyses, parallelTaskCount)
+ run_analysis(config, analyses)
if not args.setup_only:
generate_html(config, analyses)
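The task-dependency scheduling introduced above (BLOCKED tasks becoming READY once everything they run after has succeeded, with tasks keyed by (taskName, subtaskName) in an OrderedDict) is easiest to see in isolation. The sketch below is not the MPAS-Analysis API: the Task class, the status strings and run_serial are stand-ins invented for this example, and real tasks run in separate processes rather than in a simple serial loop.

# Minimal, hypothetical sketch of the dependency-driven scheduling idea;
# Task, BLOCKED, READY, SUCCESS and run_serial are illustrative stand-ins,
# not the AnalysisTask API.
from collections import OrderedDict

BLOCKED, READY, SUCCESS = 'blocked', 'ready', 'success'


class Task(object):
    def __init__(self, taskName, subtaskName=None):
        self.taskName = taskName
        self.subtaskName = subtaskName
        self.runAfterTasks = []
        self.status = BLOCKED

    def run_after(self, other):
        self.runAfterTasks.append(other)

    def run(self):
        print('running {}'.format(self.taskName))
        self.status = SUCCESS


def run_serial(tasks):
    # tasks is an OrderedDict keyed by (taskName, subtaskName), mirroring
    # the dictionary built by determine_analyses_to_generate
    for task in tasks.values():
        task.status = BLOCKED if task.runAfterTasks else READY
    while True:
        unfinished = [task for task in tasks.values()
                      if task.status != SUCCESS]
        if not unfinished:
            break
        for task in unfinished:
            if task.status == BLOCKED and all(
                    prereq.status == SUCCESS
                    for prereq in task.runAfterTasks):
                # every prerequisite has finished, so the task can run
                task.status = READY
            if task.status == READY:
                task.run()


climatologyTask = Task('mpasClimatology')
sstTask = Task('climatologyMapSST')
sstTask.run_after(climatologyTask)

tasks = OrderedDict()
for task in [sstTask, climatologyTask]:
    tasks[(task.taskName, task.subtaskName)] = task

# runs mpasClimatology first even though climatologyMapSST was added first
run_serial(tasks)

A task with no prerequisites starts READY; every other task waits until all of its runAfterTasks have reached SUCCESS, which is the same rule run_analysis applies before launching a task (a real scheduler must also handle failed prerequisites and avoid dependency cycles).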
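The comment in wait_for_task about the join timeout reflects a common multiprocessing pattern: joining each child process with a short timeout and polling in a loop keeps the parent responsive to a KeyboardInterrupt instead of blocking indefinitely in join(). A rough standalone sketch of that pattern follows; the worker function, task names and sleep times are made up for illustration and are not part of MPAS-Analysis.

# Standalone sketch of the polling pattern used by wait_for_task; all names
# here are hypothetical examples.
import time
import multiprocessing


def worker(seconds):
    time.sleep(seconds)


def wait_for_first_finished(running, timeout=0.1):
    # poll with a short timeout so a KeyboardInterrupt in the parent
    # process is handled promptly instead of blocking forever in join()
    while True:
        for name, process in running.items():
            process.join(timeout=timeout)
            if not process.is_alive():
                return name


if __name__ == '__main__':
    running = {}
    for name, seconds in [('fast', 1), ('slow', 3)]:
        process = multiprocessing.Process(target=worker, args=(seconds,))
        process.start()
        running[name] = process

    while running:
        name = wait_for_first_finished(running)
        print('{} finished'.format(name))
        running.pop(name)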