diff --git a/.gitignore b/.gitignore index 42b70ac71..241f7e5af 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,6 @@ # User defined config files -/config.analysis.* -/config.analysis_* +/config.* +!/config.template # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/README.md b/README.md index 0bf44dea0..70b754a9d 100644 --- a/README.md +++ b/README.md @@ -29,3 +29,8 @@ You can easily install them via the conda command: ``` conda install -c scitools -c https://conda.anaconda.org/opengeostat numpy scipy matplotlib ipython notebook netCDF4 progressbar vtk cartopy xarray dask bottleneck pyevtk numexpr basemap ``` + +## Running the analysis + 1. Create a configuration file by copying and modifying `config.template` or one of the example files in `configs`. + 2. Run `./run_analysis.py config.myrun`, where `config.myrun` is the config file you created. + 3. To run only a subset of the analysis, either modify the `generate` option under `[output]` in the config file or use the `--generate` flag on the command line. See `config.template` for more details. diff --git a/config.analysis b/config.analysis deleted file mode 100644 index 4605721ff..000000000 --- a/config.analysis +++ /dev/null @@ -1,263 +0,0 @@ -[case] -# ACME case configuration -casename = 20160805v0atm.A_WCYCL1850_v0atm.ne30_oEC.edison.alpha7_00 -native_res = ne30 -short_term_archive = 0 -ref_casename_v0 = B1850C5_ne30_v0.4 - -[input] -# names of namelist and streams files -ocean_namelist_filename = mpas-o_in -ocean_streams_filename = streams.ocean -seaice_namelist_filename = mpas-cice_in -seaice_streams_filename = streams.cice - -[paths] -# paths to simulation and observational datasets -archive_dir = /scratch1/scratchdirs/petercal/ACME_simulations -archive_dir_ocn = /scratch1/scratchdirs/petercal/ACME_simulations/20160805v0atm.A_WCYCL1850_v0atm.ne30_oEC.edison.alpha7_00/run -scratch_dir = /global/project/projectdirs/acme/xylar/20160805v0atm.A_WCYCL1850_v0atm.ne30_oEC.edison.alpha7_00.test.pp -plots_dir = /global/project/projectdirs/acme/xylar/coupled_diagnostics_20160805v0atm.A_WCYCL1850_v0atm.ne30_oEC.edison.alpha7_00-20160520.A_WCYCL1850.ne30_oEC.edison.alpha6_01 -log_dir = /global/project/projectdirs/acme/xylar/coupled_diagnostics_20160805v0atm.A_WCYCL1850_v0atm.ne30_oEC.edison.alpha7_00-20160520.A_WCYCL1850.ne30_oEC.edison.alpha6_01.logs -obs_ocndir = /global/project/projectdirs/acme/observations/Ocean -obs_sstdir = /global/project/projectdirs/acme/observations/Ocean/SST -obs_sssdir = /global/project/projectdirs/acme/observations/Ocean/SSS -obs_mlddir = /global/project/projectdirs/acme/observations/Ocean/MLD -obs_seaicedir = /global/project/projectdirs/acme/observations/SeaIce -ref_archive_v0_ocndir = /global/project/projectdirs/acme/ACMEv0_lowres/B1850C5_ne30_v0.4/ocn/postprocessing -ref_archive_v0_seaicedir = /global/project/projectdirs/acme/ACMEv0_lowres/B1850C5_ne30_v0.4/ice/postprocessing - -[data] -# paths to mesh and mapping files -mpas_remapfile = /global/project/projectdirs/acme/mapping/maps/map_oEC60to30_TO_0.5x0.5degree_blin.160412.nc -pop_remapfile = /global/project/projectdirs/acme/mapping/maps/map_gx1v6_TO_0.5x0.5degree_blin.160413.nc -# path to climotology dataset -mpas_climodir = /global/project/projectdirs/acme/xylar/20160805v0atm.A_WCYCL1850_v0atm.ne30_oEC.edison.alpha7_00.test.pp - -[seaIceData] -# paths to sea-ice observational datasets -obs_iceareaNH = /global/project/projectdirs/acme/observations/SeaIce/IceArea_timeseries/iceAreaNH_climo.nc -obs_iceareaSH = 
/global/project/projectdirs/acme/observations/SeaIce/IceArea_timeseries/iceAreaSH_climo.nc -obs_icevolNH = /global/project/projectdirs/acme/observations/SeaIce/PIOMAS/PIOMASvolume_monthly_climo.nc -obs_icevolSH = none - -[time] -# the first year over which to average climotologies -climo_yr1 = 6 -# the last year over which to average climotologies -climo_yr2 = 10 -# the offset year to be added to simulation years -yr_offset = 1849 -# start and end years for timeseries analysis -timeseries_yr1 = 1 -timeseries_yr2 = 9999 - -[ohc_timeseries] -generate = 1 - -[sst_timeseries] -generate = 1 - -[nino34_timeseries] -generate = 0 - -[mht_timeseries] -generate = 0 - -[moc_timeseries] -generate = 0 - -[sst_modelvsobs] -generate = 1 - -[sss_modelvsobs] -generate = 1 - -[mld_modelvsobs] -generate = 1 - -[seaice_timeseries] -generate = 1 - -[seaice_modelvsobs] -generate = 1 - -[ohc_timeseries] -## compare to output from another model run? -#compare_with_model = True -# compare to observations? -compare_with_obs = False -# list of region indices to plot from the region list below -regionIndicesToPlot = [6] -# Number of points over which to compute moving average (e.g., for monthly -# output, N_movavg=12 corresponds to a 12-month moving average window) -N_movavg = 12 - -[sst_timeseries] -## compare to output from another model run? -#compare_with_model = True -# compare to observations? -compare_with_obs = True -# list of region indices to plot from the region list below -regionIndicesToPlot = [6] -# Number of points over which to compute moving average (e.g., for monthly -# output, N_movavg=12 corresponds to a 12-month moving average window) -N_movavg = 12 - -[nino34_timeseries] -## compare to output from another model run? -#compare_with_model = True -# compare to observations? -compare_with_obs = True -# Number of points over which to compute moving average (e.g., for monthly -# output, N_movavg=12 corresponds to a 12-month moving average window) -N_movavg = 12 - -[mht_timeseries] -## compare to output from another model run? -#compare_with_model = True -# compare to observations? -compare_with_obs = True -# Number of points over which to compute moving average (e.g., for monthly -# output, N_movavg=12 corresponds to a 12-month moving average window) -N_movavg = 12 - -[moc_timeseries] -## compare to output from another model run? -#compare_with_model = True -# compare to observations? -compare_with_obs = True -# Number of points over which to compute moving average (e.g., for monthly -# output, N_movavg=12 corresponds to a 12-month moving average window) -N_movavg = 12 - -[seaice_timeseries] -## compare to output from another model run? -#compare_with_model = True -# compare to observations? 
-compare_with_obs = True -# Number of points over which to compute moving average (e.g., for monthly -# output, N_movavg=12 corresponds to a 12-month moving average window) -N_movavg = 1 -# title font properties -title_font_size = 18 - -[sst_modelvsobs] -# colormap for model/observations -#cmapModelObs = viridis -cmapModelObs = RdYlBu_r -# colormap for differences -#cmapDiff = RdBu_r -cmapDiff = coolwarm - -# indices into cmapModelObs for contour color -cmapIndicesModelObs = [0, 40, 80, 110, 140, 170, 200, 230, 255] -# indices into cmapModelObs for contour color -cmapIndicesDiff = [0, 40, 80, 120, 140, 170, 210, 255] -#cmapIndicesDiff = [0, 40, 80, 127, 170, 210, 255] # good for RdBu_r - -# colormap levels/values for contour boundaries -clevsModelObs = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] -clevsDiff = [-5, -3, -2, -1, 0, 1, 2, 3, 5] -#clevsDiff = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] # good for RdBu_r - -# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) -comparisonTimes = ['JFM', 'JAS', 'ANN'] - -[sss_modelvsobs] -# colormap for model/observations -#cmapModelObs = viridis -cmapModelObs = RdYlBu_r -# colormap for differences -#cmapDiff = RdBu_r -cmapDiff = coolwarm - -# indices into cmapModelObs for contour color -cmapIndicesModelObs = [0,40,80,110,140,170,200,230,255] -# indices into cmapModelObs for contour color -cmapIndicesDiff = [0,40,80,120,140,170,210,255] -#cmapIndicesDiff = [0,40,80,127,170,210,255] # good for RdBu_r - -# colormap levels/values for contour boundaries -clevsModelObs = [28,29,30,31,32,33,34,35,36,38] -clevsDiff = [-3,-2,-1,-0.5,0.5,1,2,3] -#clevsDiff = [-3,-2,-1,-0.5,0.5,1,2,3] # good for RdBu_r - -# Times for comparison times (Jan,Feb,Mar,Apr,May,Jun,Jul,Aug,Sep,Oct,Nov,Dec,JFM,AMJ,JAS,OND,ANN) -comparisonTimes = ['JFM','JAS','ANN'] - -[mld_modelvsobs] -# colormap for model/observations -cmapModelObs = viridis -#cmapModelObs = RdYlBu_r -# colormap for differences -cmapDiff = RdBu_r -#cmapDiff = coolwarm - -# indices into cmapModelObs for contour color -cmapIndicesModelObs = [0, 40, 80, 110, 140, 170, 200, 230, 255] -# indices into cmapModelObs for contour color -cmapIndicesDiff = [0, 40, 80, 120, 140, 170, 210, 255] -#cmapIndicesDiff = [0, 40, 80, 127, 170, 210, 255] # good for RdBu_r - -# colormap levels/values for contour boundaries -clevsModelObs = [0, 20, 40, 60, 80, 100, 150, 200, 400, 800] -clevsDiff = [-175, -125, -75, -25, -10, 10, 25, 75, 125, 175] - -# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) -comparisonTimes = ['JFM', 'JAS', 'ANN'] - -[seaice_modelvsobs] -# colormap for model/observations -cmapModelObs = inferno -# colormap for differences -cmapDiff = RdBu_r - -# indices into cmapModelObs for contour color -cmapIndicesModelObs = [20, 80, 110, 140, 170, 200, 230, 255] -# indices into cmapModelObs for contour color -cmapIndicesDiff = [0, 40, 80, 127, 127, 170, 210, 255] - -# colormap levels/values for contour boundaries (ice conncentration winter) -clevsModelObs_conc_win = [0.15, 0.4, 0.7, 0.9, 0.94, 0.96, 0.98, 0.99, 1] -clevsDiff_conc_win = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] - -# colormap levels/values for contour boundaries (ice conncentration summer) -clevsModelObs_conc_sum = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1] -clevsDiff_conc_sum = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] - -# colormap levels/values for contour boundaries (ice thickness NH) -clevsModelObs_thick_NH = [0, 0.25, 0.5, 1, 1.5, 2, 
2.5, 3, 3.5] -clevsDiff_thick_NH = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] - -# colormap levels/values for contour boundaries (ice thickness SH) -clevsModelObs_thick_SH = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5] -clevsDiff_thick_SH = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] - -# reference lat/lon for sea ice plots in the NH -latmin_NH = 50 -lon0_NH = 0 -# reference lat/lon for sea ice plots in the SH -latmin_SH = -50 -lon0_SH = 180 - -[regions] -# list of region names (needs to be in the same order as region indices in -# time-series stats) -regions = ['arctic', 'equatorial', 'so', 'nino3', 'nino4', 'nino3.4', 'global'] -# list of plot titles (needs to be in the same order as region indices in -# time-series stats) -plot_titles = ['Arctic', 'Equatorial (15S-15N)', 'Southern Ocean', 'Nino 3', 'Nino 4', 'Nino 3.4', 'Global Ocean'] - -[plot] -# set to true if you want plots to be displayed, rather than just written out -# Note: displayToScreen = True seems to hang on Edison on large data sets, -# so suggested use is just for debugging either locally or with small data sets -displayToScreen = False - -# font size on axes -axis_font_size = 16 -# title font properties -title_font_size = 20 -title_font_color = black -title_font_weight = normal diff --git a/config.template b/config.template new file mode 100644 index 000000000..359138b77 --- /dev/null +++ b/config.template @@ -0,0 +1,370 @@ +[runs] +## options related to the run to be analyzed and reference runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = runName +# referenceRunName is the name of a reference run to compare against (or None +# to turn off comparison with a reference, e.g. if no reference case is +# available) +referenceRunName = None +# preprocessedReferenceRunName is the name of a reference run that has been +# preprocessed to compare against (or None to turn off comparison). Reference +# runs of this type would have preprocessed results because they were not +# performed with MPAS components (so they cannot be easily ingested by +# MPAS-Analysis) +preprocessedReferenceRunName = None + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /dir/to/model/output +# names of namelist and streams files. If not in baseDirectory, give full path +oceanNamelistFileName = mpas-o_in +oceanStreamsFileName = streams.ocean +seaIceNamelistFileName = mpas-cice_in +seaIceStreamsFileName = streams.cice + +[output] +## options related to writing out plots, intermediate cached data sets, logs, +## etc. + +# directory where analysis should be written +baseDirectory = /dir/to/analysis/output + +# subdirectories within baseDirectory for analysis output +scratchSubdirectory = scratch +plotsSubdirectory = plots +logsSubdirectory = logs +climatologySubdirectory = clim + +# a list of analyses to generate. Valid names are: +# 'timeSeriesOHC', 'timeSeriesSST', 'regriddedSST', +# 'regriddedSSS', 'regriddedMLD', 'timeSeriesSeaIceAreaVol', +# 'regriddedSeaIceConcThick' +# the following shortcuts exist: +# 'all' -- all analyses will be run +# 'all_timeSeries' -- all time-series analyses will be run +# 'all_regriddedHorizontal' -- all analyses involving regridded horizontal +# fields will be run +# 'all_ocean' -- all ocean analyses will be run +# 'all_seaIce' -- all sea-ice analyses will be run +# 'no_timeSeriesOHC' -- skip 'timeSeriesOHC' (and similarly with the +# other analyses). 
+# 'no_ocean', 'no_timeSeries', etc. -- in analogy to 'all_*', skip the +# given category of analysis +# an equivalent syntax can be used on the command line to override this +# option: +# ./run_analysis.py config.analysis --generate \ +# all,no_ocean,all_timeSeries +generate = ['all'] + +# alternative examples that would perform all analysis except +# 'timeSeriesOHC' +#generate = ['timeSeriesSST', 'all_regriddedHorizontal', 'all_seaIce'] +#generate = ['all', 'no_timeSeriesOHC'] +# Each subsequent list entry can be used to alter previous list entries. For +# example, the following would produce all analyses except regriddedSST, +# regriddedSSS and regriddedMLD (albeit not in a very intuitive way): +#generate = ['all', 'no_ocean', 'all_timeSeries'] + +[climatology] +## options related to producing climatologies, typically to compare against +## observations and previous runs + +# the first year over which to average climatologies +startYear = 11 +# the last year over which to average climatologies +endYear = 20 + +[timeSeries] +## options related to producing time series plots, often to compare against +## observations and previous runs + +# start and end years for timeseries analysis. Using out-of-bounds values +# like startYear = 1 and endYear = 9999 will be clipped to the valid range +# of years, and is a good way of ensuring that all values are used. +startYear = 1 +endYear = 9999 + +[oceanObservations] +## options related to ocean observations with which the results will be compared + +# directory where ocean observations are stored +baseDirectory = /dir/to/ocean/observations +sstSubdirectory = SST +sssSubdirectory = SSS +mldSubdirectory = MLD + +[oceanReference] +## options related to ocean reference run with which the results will be +## compared + +# directory where ocean reference simulation results are stored +baseDirectory = /dir/to/ocean/reference + +[oceanPreprocessedReference] +## options related to preprocessed ocean reference run with which the results +## will be compared (e.g. a POP, CESM or ACME v0 run) + +# directory where ocean reference simulation results are stored +baseDirectory = /dir/to/ocean/reference + +[seaIceObservations] +## options related to sea ice observations with which the results will be +## compared + +# directory where sea ice observations are stored +baseDirectory = /dir/to/seaice/observations +areaNH = IceArea_timeseries/iceAreaNH_climo.nc +areaSH = IceArea_timeseries/iceAreaSH_climo.nc +volNH = PIOMAS/PIOMASvolume_monthly_climo.nc +volSH = none + +[seaIceReference] +## options related to sea ice reference run with which the results will be +## compared + +# directory where sea ice reference simulation results are stored +baseDirectory = /dir/to/seaice/reference + +[seaIcePreprocessedReference] +## options related to preprocessed sea ice reference run with which the results +## will be compared (e.g. a CICE, CESM or ACME v0 run) + +# directory where sea ice reference simulation results are stored +baseDirectory = /dir/to/seaice/reference + +[time] +## options related to dates and times for all runs (main, reference and +## preprocessed) + +# the offset in years to be added to simulation years to prevent xarray +# calendar problems. (Hopefully, this will be removed soon.) 
+yearOffset = 1849 + +[remapping] +## options related to horizontal remapping + +# paths to mapping files (which will hopefully be eliminated soon) +mpasRemapFile = /global/project/projectdirs/acme/mapping/maps/map_oEC60to30_TO_0.5x0.5degree_blin.160412.nc + +[timeSeriesOHC] +## options related to plotting time series of ocean heat content (OHC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = False +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSST] +## options related to plotting time series of sea surface temperature (SST) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesNino34] +## options related to plotting time series of the El Nino 3.4 index + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMHT] +## options related to plotting time series of meridional heat transport (MHT) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMOC] +## options related to plotting time series of meridional overturning +## circulation (MOC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSeaIceAreaVol] +## options related to plotting time series of sea ice area and volume + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? 
+compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 1 +# title font properties +titleFontSize = 18 + +[regriddedSST] +## options related to plotting horizontally regridded sea surface temperature +## (SST) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-5, -3, -2, -1, 0, 1, 2, 3, 5] + +# alternative colormap/index suggestions +#resultColormap = viridis +#differenceColormap = RdBu_r +#differenceColormapIndices = [0, 40, 80, 127, 170, 210, 255] +#differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSSS] +## options related to plotting horizontally regridded sea surface salinity +## (SSS) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [28, 29, 30, 31, 32, 33, 34, 35, 36, 38] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedMLD] +## options related to plotting horizontally regridded mixed layer depth +## (MLD) against reference model results and observations + +# colormap for model/observations +resultColormap = viridis +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [0, 20, 40, 60, 80, 100, 150, 200, 400, 800] + +###### Xylar's note: I think something is off here. 
I think there should +###### be one more contour value than there are indices but there are 2 in MLD +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-175, -125, -75, -25, -10, 10, 25, 75, 125, 175] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSeaIceConcThick] +## options related to plotting horizontally regridded sea ice concentration +## and thickness against reference model results and observations + +# colormap for model/observations +resultColormap = inferno +# indices into resultColormap for contour color +resultColormapIndices = [20, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +resultConcWinterContourValues = [0.15, 0.4, 0.7, 0.9, 0.94, 0.96, 0.98, 0.99, 1] +resultConcSummerContourValues = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1] +# thickness in the northern and southern hemispheres +resultThickNHContourValues = [0, 0.25, 0.5, 1, 1.5, 2, 2.5, 3, 3.5] +resultThickSHContourValues = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5] + +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 127, 127, 170, 210, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +differenceConcWinterContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +differenceConcSummerContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +# thickness in the northern and southern hemispheres +differenceThickNHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] +differenceThickSHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] + +# reference lat/lon for sea ice plots in the northern hemisphere +minimumLatitudeNH = 50 +referenceLongitudeNH = 0 +# reference lat/lon for sea ice plots in the southern hemisphere +minimumLatitudeSH = -50 +referenceLongitudeSH = 180 + +[regions] +## options related to ocean regions used in several analysis modules + +# list of region names (needs to be in the same order as region indices in +# time-series stats) +regions = ['arctic', 'equatorial', 'so', 'nino3', 'nino4', 'nino3.4', 'global'] +# list of plot titles (needs to be in the same order as region indices in +# time-series stats) +plotTitles = ['Arctic', 'Equatorial (15S-15N)', 'Southern Ocean', 'Nino 3', 'Nino 4', 'Nino 3.4', 'Global Ocean'] + +[plot] +## options related to plotting that are the defaults across all analysis modules + +# set to true if you want plots to be displayed (one by one) to the screen in +# addition to being written out to png files +# Note: displayToScreen = True seems to hang on Edison on large data sets, +# so suggested use is just for debugging either locally or with small data sets +displayToScreen = False + +# font size on axes +axisFontSize = 16 +# title font properties +titleFontSize = 20 +titleFontColor = black +titleFontWeight = normal diff --git a/configs/README.md b/configs/README.md new file mode 100644 index 000000000..c2edd326c --- /dev/null +++ b/configs/README.md @@ -0,0 +1,9 @@ +# MPAS-Analysis + +Example config files for various HPC machines and various runs. 
+ +The intended usage is to copy one of these examples to the root of +MPAS-Analysis (where `run_analysis.py` is located) before modifying them +(e.g. setting the output `baseDirectory`) and using them to run the +analysis. + diff --git a/configs/edison/config.20161117.beta0.A_WCYCL1850.ne30_oEC.edison b/configs/edison/config.20161117.beta0.A_WCYCL1850.ne30_oEC.edison new file mode 100644 index 000000000..5ed53f98d --- /dev/null +++ b/configs/edison/config.20161117.beta0.A_WCYCL1850.ne30_oEC.edison @@ -0,0 +1,370 @@ +[runs] +## options related to the run to be analyzed and reference runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = 20161117.beta0.A_WCYCL1850.ne30_oEC.edison +# referenceRunName is the name of a reference run to compare against (or None +# to turn off comparison with a reference, e.g. if no reference case is +# available) +referenceRunName = None +# preprocessedReferenceRunName is the name of a reference run that has been +# preprocessed to compare against (or None to turn off comparison). Reference +# runs of this type would have preprocessed results because they were not +# performed with MPAS components (so they cannot be easily ingested by +# MPAS-Analysis) +preprocessedReferenceRunName = B1850C5_ne30_v0.4 + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /scratch2/scratchdirs/golaz/ACME_simulations/20161117.beta0.A_WCYCL1850.ne30_oEC.edison/run/ +# names of namelist and streams files. If not in baseDirectory, give full path +oceanNamelistFileName = mpas-o_in +oceanStreamsFileName = streams.ocean +seaIceNamelistFileName = mpas-cice_in +seaIceStreamsFileName = streams.cice + +[output] +## options related to writing out plots, intermediate cached data sets, logs, +## etc. + +# directory where analysis should be written +baseDirectory = /dir/to/analysis/output + +# subdirectories within baseDirectory for analysis output +scratchSubdirectory = scratch +plotsSubdirectory = plots +logsSubdirectory = logs +climatologySubdirectory = clim + +# a list of analyses to generate. Valid names are: +# 'timeSeriesOHC', 'timeSeriesSST', 'regriddedSST', +# 'regriddedSSS', 'regriddedMLD', 'timeSeriesSeaIceAreaVol', +# 'regriddedSeaIceConcThick' +# the following shortcuts exist: +# 'all' -- all analyses will be run +# 'all_timeSeries' -- all time-series analyses will be run +# 'all_regriddedHorizontal' -- all analyses involving regridded horizontal +# fields will be run +# 'all_ocean' -- all ocean analyses will be run +# 'all_seaIce' -- all sea-ice analyses will be run +# 'no_timeSeriesOHC' -- skip 'timeSeriesOHC' (and similarly with the +# other analyses). +# 'no_ocean', 'no_timeSeries', etc. -- in analogy to 'all_*', skip the +# given category of analysis +# an equivalent syntax can be used on the command line to override this +# option: +# ./run_analysis.py config.analysis --generate \ +# all,no_ocean,all_timeSeries +generate = ['all'] + +# alternative examples that would perform all analysis except +# 'timeSeriesOHC' +#generate = ['timeSeriesSST', 'all_regriddedHorizontal', 'all_seaIce'] +#generate = ['all', 'no_timeSeriesOHC'] +# Each subsequent list entry can be used to alter previous list entries. 
For +# example, the following would produce all analyses except regriddedSST, +# regriddedSSS and regriddedMLD (albeit not in a very intuitive way): +#generate = ['all', 'no_ocean', 'all_timeSeries'] + +[climatology] +## options related to producing climatologies, typically to compare against +## observations and previous runs + +# the first year over which to average climatalogies +startYear = 41 +# the last year over which to average climatalogies +endYear = 50 + +[timeSeries] +## options related to producing time series plots, often to compare against +## observations and previous runs + +# start and end years for timeseries analysis. Using out-of-bounds values +# like start_year = 1 and end_year = 9999 will be clipped to the valid range +# of years, and is a good way of insuring that all values are used. +startYear = 1 +endYear = 51 + +[oceanObservations] +## options related to ocean observations with which the results will be compared + +# directory where ocean observations are stored +baseDirectory = / +sstSubdirectory = global/project/projectdirs/acme/observations/Ocean/SST +sssSubdirectory = global/homes/l/lvroekel +mldSubdirectory = /global/project/projectdirs/acme/observations/Ocean/MLD + +[oceanReference] +## options related to ocean reference run with which the results will be +## compared + +# directory where ocean reference simulation results are stored +baseDirectory = /dir/to/ocean/reference + +[oceanPreprocessedReference] +## options related to preprocessed ocean reference run with which the results +## will be compared (e.g. a POP, CESM or ACME v0 run) + +# directory where ocean reference simulation results are stored +baseDirectory = /global/project/projectdirs/acme/ACMEv0_lowres/B1850C5_ne30_v0.4/ocn/postprocessing + +[seaIceObservations] +## options related to sea ice observations with which the results will be +## compared + +# directory where sea ice observations are stored +baseDirectory = /global/project/projectdirs/acme/observations/SeaIce +areaNH = IceArea_timeseries/iceAreaNH_climo.nc +areaSH = IceArea_timeseries/iceAreaSH_climo.nc +volNH = PIOMAS/PIOMASvolume_monthly_climo.nc +volSH = none + +[seaIceReference] +## options related to sea ice reference run with which the results will be +## compared + +# directory where sea ice reference simulation results are stored +baseDirectory = /dir/to/seaice/reference + +[seaIcePreprocessedReference] +## options related to preprocessed sea ice reference run with which the results +## will be compared (e.g. a CICE, CESM or ACME v0 run) + +# directory where ocean reference simulation results are stored +baseDirectory = /global/project/projectdirs/acme/ACMEv0_lowres/B1850C5_ne30_v0.4/ice/postprocessing + +[time] +## options related to dates and times for all runs (main, reference and +## preprocessed) + +# the offset in years to be added to simulation years to prevent xarray +# calendar problems. (Hopefully, this will be removed soon.) +yearOffset = 1849 + +[remapping] +## options related to horizontal remapping + +# paths to mapping files (which will hopefully be eliminated soon) +mpasRemapFile = /global/project/projectdirs/acme/mapping/maps/map_oEC60to30_TO_0.5x0.5degree_blin.160412.nc + +[timeSeriesOHC] +## options related to plotting time series of ocean heat content (OHC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? 
+compareWithObservations = False +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSST] +## options related to plotting time series of sea surface temperature (SST) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesNino34] +## options related to plotting time series of the El Nino 3.4 index + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMHT] +## options related to plotting time series of meridional heat transport (MHT) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMOC] +## options related to plotting time series of meridional overturning +## circulation (MOC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSeaIceAreaVol] +## options related to plotting time series of sea ice area and volume + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? 
+compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 1 +# title font properties +titleFontSize = 18 + +[regriddedSST] +## options related to plotting horizontally regridded sea surface temperature +## (SST) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-5, -3, -2, -1, 0, 1, 2, 3, 5] + +# alternative colormap/index suggestions +#resultColormap = viridis +#differenceColormap = RdBu_r +#differenceColormapIndices = [0, 40, 80, 127, 170, 210, 255] +#differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSSS] +## options related to plotting horizontally regridded sea surface salinity +## (SSS) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [28, 29, 30, 31, 32, 33, 34, 35, 36, 38] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedMLD] +## options related to plotting horizontally regridded mixed layer depth +## (MLD) against reference model results and observations + +# colormap for model/observations +resultColormap = viridis +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [0, 20, 40, 60, 80, 100, 150, 200, 400, 800] + +###### Xylar's note: I think something is off here. 
I think there should +###### be one more contour value than there are indices but there are 2 in MLD +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-175, -125, -75, -25, -10, 10, 25, 75, 125, 175] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSeaIceConcThick] +## options related to plotting horizontally regridded sea ice concentration +## and thickness against reference model results and observations + +# colormap for model/observations +resultColormap = inferno +# indices into resultColormap for contour color +resultColormapIndices = [20, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +resultConcWinterContourValues = [0.15, 0.4, 0.7, 0.9, 0.94, 0.96, 0.98, 0.99, 1] +resultConcSummerContourValues = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1] +# thickness in the northern and southern hemispheres +resultThickNHContourValues = [0, 0.25, 0.5, 1, 1.5, 2, 2.5, 3, 3.5] +resultThickSHContourValues = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5] + +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 127, 127, 170, 210, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +differenceConcWinterContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +differenceConcSummerContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +# thickness in the northern and southern hemispheres +differenceThickNHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] +differenceThickSHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] + +# reference lat/lon for sea ice plots in the northern hemisphere +minimumLatitudeNH = 50 +referenceLongitudeNH = 0 +# reference lat/lon for sea ice plots in the southern hemisphere +minimumLatitudeSH = -50 +referenceLongitudeSH = 180 + +[regions] +## options related to ocean regions used in several analysis modules + +# list of region names (needs to be in the same order as region indices in +# time-series stats) +regions = ['arctic', 'equatorial', 'so', 'nino3', 'nino4', 'nino3.4', 'global'] +# list of plot titles (needs to be in the same order as region indices in +# time-series stats) +plotTitles = ['Arctic', 'Equatorial (15S-15N)', 'Southern Ocean', 'Nino 3', 'Nino 4', 'Nino 3.4', 'Global Ocean'] + +[plot] +## options related to plotting that are the defaults across all analysis modules + +# set to true if you want plots to be displayed (one by one) to the screen in +# addition to being written out to png files +# Note: displayToScreen = True seems to hang on Edison on large data sets, +# so suggested use is just for debugging either locally or with small data sets +displayToScreen = False + +# font size on axes +axisFontSize = 16 +# title font properties +titleFontSize = 20 +titleFontColor = black +titleFontWeight = normal diff --git a/configs/lanl/config.20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison b/configs/lanl/config.20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison new file mode 100644 index 000000000..bb24f3fa5 --- /dev/null +++ b/configs/lanl/config.20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison @@ -0,0 
+1,370 @@ +[runs] +## options related to the run to be analyzed and reference runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = 20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison +# referenceRunName is the name of a reference run to compare against (or None +# to turn off comparison with a reference, e.g. if no reference case is +# available) +referenceRunName = None +# preprocessedReferenceRunName is the name of a reference run that has been +# preprocessed to compare against (or None to turn off comparison). Reference +# runs of this type would have preprocessed results because they were not +# performed with MPAS components (so they cannot be easily ingested by +# MPAS-Analysis) +preprocessedReferenceRunName = B1850C5_ne30_v0.4 + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /lustre/scratch2/turquoise/milena/ACME/20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison/run +# names of namelist and streams files. If not in baseDirectory, give full path +oceanNamelistFileName = mpas-o_in +oceanStreamsFileName = streams.ocean +seaIceNamelistFileName = mpas-cice_in +seaIceStreamsFileName = streams.cice + +[output] +## options related to writing out plots, intermediate cached data sets, logs, +## etc. + +# directory where analysis should be written +baseDirectory = /dir/to/analysis/output + +# subdirectories within baseDirectory for analysis output +scratchSubdirectory = scratch +plotsSubdirectory = plots +logsSubdirectory = logs +climatologySubdirectory = clim + +# a list of analyses to generate. Valid names are: +# 'timeSeriesOHC', 'timeSeriesSST', 'regriddedSST', +# 'regriddedSSS', 'regriddedMLD', 'timeSeriesSeaIceAreaVol', +# 'regriddedSeaIceConcThick' +# the following shortcuts exist: +# 'all' -- all analyses will be run +# 'all_timeSeries' -- all time-series analyses will be run +# 'all_regriddedHorizontal' -- all analyses involving regridded horizontal +# fields will be run +# 'all_ocean' -- all ocean analyses will be run +# 'all_seaIce' -- all sea-ice analyses will be run +# 'no_timeSeriesOHC' -- skip 'timeSeriesOHC' (and similarly with the +# other analyses). +# 'no_ocean', 'no_timeSeries', etc. -- in analogy to 'all_*', skip the +# given category of analysis +# an equivalent syntax can be used on the command line to override this +# option: +# ./run_analysis.py config.analysis --generate \ +# all,no_ocean,all_timeSeries +generate = ['all'] + +# alternative examples that would perform all analysis except +# 'timeSeriesOHC' +#generate = ['timeSeriesSST', 'all_regriddedHorizontal', 'all_seaIce'] +#generate = ['all', 'no_timeSeriesOHC'] +# Each subsequent list entry can be used to alter previous list entries. For +# example, the following would produce all analyses except regriddedSST, +# regriddedSSS and regriddedMLD (albeit not in a very intuitive way): +#generate = ['all', 'no_ocean', 'all_timeSeries'] + +[climatology] +## options related to producing climatologies, typically to compare against +## observations and previous runs + +# the first year over which to average climatalogies +startYear = 2 +# the last year over which to average climatalogies +endYear = 3 + +[timeSeries] +## options related to producing time series plots, often to compare against +## observations and previous runs + +# start and end years for timeseries analysis. 
Using out-of-bounds values +# like start_year = 1 and end_year = 9999 will be clipped to the valid range +# of years, and is a good way of insuring that all values are used. +startYear = 2 +endYear = 3 + +[oceanObservations] +## options related to ocean observations with which the results will be compared + +# directory where ocean observations are stored +baseDirectory = /usr/projects/climate/SHARED_CLIMATE/observations +sstSubdirectory = SST +sssSubdirectory = SSS +mldSubdirectory = MLD + +[oceanReference] +## options related to ocean reference run with which the results will be +## compared + +# directory where ocean reference simulation results are stored +baseDirectory = /dir/to/ocean/reference + +[oceanPreprocessedReference] +## options related to preprocessed ocean reference run with which the results +## will be compared (e.g. a POP, CESM or ACME v0 run) + +# directory where ocean reference simulation results are stored +baseDirectory = /usr/projects/climate/SHARED_CLIMATE/ACMEv0_lowres/B1850C5_ne30_v0.4/ocn/postprocessing + +[seaIceObservations] +## options related to sea ice observations with which the results will be +## compared + +# directory where sea ice observations are stored +baseDirectory = /usr/projects/climate/SHARED_CLIMATE/observations/SeaIce +areaNH = IceArea_timeseries/iceAreaNH_climo.nc +areaSH = IceArea_timeseries/iceAreaSH_climo.nc +volNH = PIOMAS/PIOMASvolume_monthly_climo.nc +volSH = none + +[seaIceReference] +## options related to sea ice reference run with which the results will be +## compared + +# directory where sea ice reference simulation results are stored +baseDirectory = /dir/to/seaice/reference + +[seaIcePreprocessedReference] +## options related to preprocessed sea ice reference run with which the results +## will be compared (e.g. a CICE, CESM or ACME v0 run) + +# directory where ocean reference simulation results are stored +baseDirectory = /usr/projects/climate/SHARED_CLIMATE/ACMEv0_lowres/B1850C5_ne30_v0.4/ice/postprocessing + +[time] +## options related to dates and times for all runs (main, reference and +## preprocessed) + +# the offset in years to be added to simulation years to prevent xarray +# calendar problems. (Hopefully, this will be removed soon.) +yearOffset = 1849 + +[remapping] +## options related to horizontal remapping + +# paths to mapping files (which will hopefully be eliminated soon) +mpasRemapFile = /usr/projects/climate/milena/mapping_files/map_oEC60to30_TO_0.5x0.5degree_blin.160412.nc + +[timeSeriesOHC] +## options related to plotting time series of ocean heat content (OHC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = False +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSST] +## options related to plotting time series of sea surface temperature (SST) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? 
+compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesNino34] +## options related to plotting time series of the El Nino 3.4 index + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMHT] +## options related to plotting time series of meridional heat transport (MHT) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMOC] +## options related to plotting time series of meridional overturning +## circulation (MOC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSeaIceAreaVol] +## options related to plotting time series of sea ice area and volume + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? 
+compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 1 +# title font properties +titleFontSize = 18 + +[regriddedSST] +## options related to plotting horizontally regridded sea surface temperature +## (SST) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-5, -3, -2, -1, 0, 1, 2, 3, 5] + +# alternative colormap/index suggestions +#resultColormap = viridis +#differenceColormap = RdBu_r +#differenceColormapIndices = [0, 40, 80, 127, 170, 210, 255] +#differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSSS] +## options related to plotting horizontally regridded sea surface salinity +## (SSS) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [28, 29, 30, 31, 32, 33, 34, 35, 36, 38] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedMLD] +## options related to plotting horizontally regridded mixed layer depth +## (MLD) against reference model results and observations + +# colormap for model/observations +resultColormap = viridis +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [0, 20, 40, 60, 80, 100, 150, 200, 400, 800] + +###### Xylar's note: I think something is off here. 
I think there should +###### be one more contour value than there are indices but there are 2 in MLD +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-175, -125, -75, -25, -10, 10, 25, 75, 125, 175] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSeaIceConcThick] +## options related to plotting horizontally regridded sea ice concentration +## and thickness against reference model results and observations + +# colormap for model/observations +resultColormap = inferno +# indices into resultColormap for contour color +resultColormapIndices = [20, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +resultConcWinterContourValues = [0.15, 0.4, 0.7, 0.9, 0.94, 0.96, 0.98, 0.99, 1] +resultConcSummerContourValues = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1] +# thickness in the northern and southern hemispheres +resultThickNHContourValues = [0, 0.25, 0.5, 1, 1.5, 2, 2.5, 3, 3.5] +resultThickSHContourValues = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5] + +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 127, 127, 170, 210, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +differenceConcWinterContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +differenceConcSummerContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +# thickness in the northern and southern hemispheres +differenceThickNHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] +differenceThickSHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] + +# reference lat/lon for sea ice plots in the northern hemisphere +minimumLatitudeNH = 50 +referenceLongitudeNH = 0 +# reference lat/lon for sea ice plots in the southern hemisphere +minimumLatitudeSH = -50 +referenceLongitudeSH = 180 + +[regions] +## options related to ocean regions used in several analysis modules + +# list of region names (needs to be in the same order as region indices in +# time-series stats) +regions = ['arctic', 'equatorial', 'so', 'nino3', 'nino4', 'nino3.4', 'global'] +# list of plot titles (needs to be in the same order as region indices in +# time-series stats) +plotTitles = ['Arctic', 'Equatorial (15S-15N)', 'Southern Ocean', 'Nino 3', 'Nino 4', 'Nino 3.4', 'Global Ocean'] + +[plot] +## options related to plotting that are the defaults across all analysis modules + +# set to true if you want plots to be displayed (one by one) to the screen in +# addition to being written out to png files +# Note: displayToScreen = True seems to hang on Edison on large data sets, +# so suggested use is just for debugging either locally or with small data sets +displayToScreen = False + +# font size on axes +axisFontSize = 16 +# title font properties +titleFontSize = 20 +titleFontColor = black +titleFontWeight = normal diff --git a/configs/lanl/config.20170120.beta0.GMPAS-QU240.wolf b/configs/lanl/config.20170120.beta0.GMPAS-QU240.wolf new file mode 100644 index 000000000..093c41935 --- /dev/null +++ b/configs/lanl/config.20170120.beta0.GMPAS-QU240.wolf @@ -0,0 +1,370 @@ +[runs] +## options related to the 
run to be analyzed and reference runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = 20170120.beta0.GMPAS-QU240.wolf +# referenceRunName is the name of a reference run to compare against (or None +# to turn off comparison with a reference, e.g. if no reference case is +# available) +referenceRunName = None +# preprocessedReferenceRunName is the name of a reference run that has been +# preprocessed to compare against (or None to turn off comparison). Reference +# runs of this type would have preprocessed results because they were not +# performed with MPAS components (so they cannot be easily ingested by +# MPAS-Analysis) +preprocessedReferenceRunName = B1850C5_ne30_v0.4 + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /lustre/scratch2/turquoise/xylar/ACME/cases/GMPAS-QU240/run +# names of namelist and streams files. If not in baseDirectory, give full path +oceanNamelistFileName = mpas-o_in +oceanStreamsFileName = streams.ocean +seaIceNamelistFileName = mpas-cice_in +seaIceStreamsFileName = streams.cice + +[output] +## options related to writing out plots, intermediate cached data sets, logs, +## etc. + +# directory where analysis should be written +baseDirectory = /dir/to/analysis/output + +# subdirectories within baseDirectory for analysis output +scratchSubdirectory = scratch +plotsSubdirectory = plots +logsSubdirectory = logs +climatologySubdirectory = clim + +# a list of analyses to generate. Valid names are: +# 'timeSeriesOHC', 'timeSeriesSST', 'regriddedSST', +# 'regriddedSSS', 'regriddedMLD', 'timeSeriesSeaIceAreaVol', +# 'regriddedSeaIceConcThick' +# the following shortcuts exist: +# 'all' -- all analyses will be run +# 'all_timeSeries' -- all time-series analyses will be run +# 'all_regriddedHorizontal' -- all analyses involving regridded horizontal +# fields will be run +# 'all_ocean' -- all ocean analyses will be run +# 'all_seaIce' -- all sea-ice analyses will be run +# 'no_timeSeriesOHC' -- skip 'timeSeriesOHC' (and similarly with the +# other analyses). +# 'no_ocean', 'no_timeSeries', etc. -- in analogy to 'all_*', skip the +# given category of analysis +# an equivalent syntax can be used on the command line to override this +# option: +# ./run_analysis.py config.analysis --generate \ +# all,no_ocean,all_timeSeries +generate = ['all', 'no_regriddedSeaIceConcThick'] + +# alternative examples that would perform all analysis except +# 'timeSeriesOHC' +#generate = ['timeSeriesSST', 'all_regriddedHorizontal', 'all_seaIce'] +#generate = ['all', 'no_timeSeriesOHC'] +# Each subsequent list entry can be used to alter previous list entries. For +# example, the following would produce all analyses except regriddedSST, +# regriddedSSS and regriddedMLD (albeit not in a very intuitive way): +#generate = ['all', 'no_ocean', 'all_timeSeries'] + +[climatology] +## options related to producing climatologies, typically to compare against +## observations and previous runs + +# the first year over which to average climatalogies +startYear = 1 +# the last year over which to average climatalogies +endYear = 3 + +[timeSeries] +## options related to producing time series plots, often to compare against +## observations and previous runs + +# start and end years for timeseries analysis. 
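The `generate` shortcuts above ('all', 'all_<category>', 'no_<analysisName>' and 'no_<category>') are applied from left to right, so later entries override earlier ones. The sketch below only illustrates that behavior; the category tags assigned to each analysis are assumptions made for the example, not the actual `run_analysis.py` implementation.

```
# Minimal sketch of expanding a 'generate' list; the tag assignments are
# illustrative assumptions, not taken from the MPAS-Analysis source.
ANALYSES = {
    'timeSeriesOHC': {'timeSeries', 'ocean'},
    'timeSeriesSST': {'timeSeries', 'ocean'},
    'regriddedSST': {'regriddedHorizontal', 'ocean'},
    'regriddedSSS': {'regriddedHorizontal', 'ocean'},
    'regriddedMLD': {'regriddedHorizontal', 'ocean'},
    'timeSeriesSeaIceAreaVol': {'timeSeries', 'seaIce'},
    'regriddedSeaIceConcThick': {'regriddedHorizontal', 'seaIce'}}


def expand_generate(generate):
    """Apply entries left to right; later entries override earlier ones."""
    selected = set()
    for entry in generate:
        if entry == 'all':
            selected.update(ANALYSES)
        elif entry.startswith('all_'):
            tag = entry[len('all_'):]
            selected.update(name for name, tags in ANALYSES.items()
                            if tag in tags)
        elif entry.startswith('no_'):
            key = entry[len('no_'):]
            if key in ANALYSES:
                selected.discard(key)
            else:
                selected.difference_update(
                    name for name, tags in ANALYSES.items() if key in tags)
        else:
            selected.add(entry)
    return selected


# ['all', 'no_regriddedSeaIceConcThick'] keeps everything except the sea-ice
# concentration/thickness maps, matching the generate option in this config
print(sorted(expand_generate(['all', 'no_regriddedSeaIceConcThick'])))
```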
Using out-of-bounds values +# like start_year = 1 and end_year = 9999 will be clipped to the valid range +# of years, and is a good way of insuring that all values are used. +startYear = 1 +endYear = 3 + +[oceanObservations] +## options related to ocean observations with which the results will be compared + +# directory where ocean observations are stored +baseDirectory = /usr/projects/climate/SHARED_CLIMATE/observations +sstSubdirectory = SST +sssSubdirectory = SSS +mldSubdirectory = MLD + +[oceanReference] +## options related to ocean reference run with which the results will be +## compared + +# directory where ocean reference simulation results are stored +baseDirectory = /dir/to/ocean/reference + +[oceanPreprocessedReference] +## options related to preprocessed ocean reference run with which the results +## will be compared (e.g. a POP, CESM or ACME v0 run) + +# directory where ocean reference simulation results are stored +baseDirectory = /usr/projects/climate/SHARED_CLIMATE/ACMEv0_lowres/B1850C5_ne30_v0.4/ocn/postprocessing + +[seaIceObservations] +## options related to sea ice observations with which the results will be +## compared + +# directory where sea ice observations are stored +baseDirectory = /usr/projects/climate/SHARED_CLIMATE/observations/SeaIce +areaNH = IceArea_timeseries/iceAreaNH_climo.nc +areaSH = IceArea_timeseries/iceAreaSH_climo.nc +volNH = PIOMAS/PIOMASvolume_monthly_climo.nc +volSH = none + +[seaIceReference] +## options related to sea ice reference run with which the results will be +## compared + +# directory where sea ice reference simulation results are stored +baseDirectory = /dir/to/seaice/reference + +[seaIcePreprocessedReference] +## options related to preprocessed sea ice reference run with which the results +## will be compared (e.g. a CICE, CESM or ACME v0 run) + +# directory where ocean reference simulation results are stored +baseDirectory = /usr/projects/climate/SHARED_CLIMATE/ACMEv0_lowres/B1850C5_ne30_v0.4/ice/postprocessing + +[time] +## options related to dates and times for all runs (main, reference and +## preprocessed) + +# the offset in years to be added to simulation years to prevent xarray +# calendar problems. (Hopefully, this will be removed soon.) +yearOffset = 1849 + +[remapping] +## options related to horizontal remapping + +# paths to mapping files (which will hopefully be eliminated soon) +mpasRemapFile = /usr/projects/climate/milena/mapping_files/rmp_QU240_to_0.5x0.5_conserve.nc + +[timeSeriesOHC] +## options related to plotting time series of ocean heat content (OHC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = False +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSST] +## options related to plotting time series of sea surface temperature (SST) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? 
+compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesNino34] +## options related to plotting time series of the El Nino 3.4 index + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMHT] +## options related to plotting time series of meridional heat transport (MHT) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMOC] +## options related to plotting time series of meridional overturning +## circulation (MOC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSeaIceAreaVol] +## options related to plotting time series of sea ice area and volume + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? 
+compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 1 +# title font properties +titleFontSize = 18 + +[regriddedSST] +## options related to plotting horizontally regridded sea surface temperature +## (SST) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-5, -3, -2, -1, 0, 1, 2, 3, 5] + +# alternative colormap/index suggestions +#resultColormap = viridis +#differenceColormap = RdBu_r +#differenceColormapIndices = [0, 40, 80, 127, 170, 210, 255] +#differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSSS] +## options related to plotting horizontally regridded sea surface salinity +## (SSS) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [28, 29, 30, 31, 32, 33, 34, 35, 36, 38] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedMLD] +## options related to plotting horizontally regridded mixed layer depth +## (MLD) against reference model results and observations + +# colormap for model/observations +resultColormap = viridis +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [0, 20, 40, 60, 80, 100, 150, 200, 400, 800] + +###### Xylar's note: I think something is off here. 
I think there should +###### be one more contour value than there are indices but there are 2 in MLD +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-175, -125, -75, -25, -10, 10, 25, 75, 125, 175] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSeaIceConcThick] +## options related to plotting horizontally regridded sea ice concentration +## and thickness against reference model results and observations + +# colormap for model/observations +resultColormap = inferno +# indices into resultColormap for contour color +resultColormapIndices = [20, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +resultConcWinterContourValues = [0.15, 0.4, 0.7, 0.9, 0.94, 0.96, 0.98, 0.99, 1] +resultConcSummerContourValues = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1] +# thickness in the northern and southern hemispheres +resultThickNHContourValues = [0, 0.25, 0.5, 1, 1.5, 2, 2.5, 3, 3.5] +resultThickSHContourValues = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5] + +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 127, 127, 170, 210, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +differenceConcWinterContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +differenceConcSummerContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +# thickness in the northern and southern hemispheres +differenceThickNHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] +differenceThickSHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] + +# reference lat/lon for sea ice plots in the northern hemisphere +minimumLatitudeNH = 50 +referenceLongitudeNH = 0 +# reference lat/lon for sea ice plots in the southern hemisphere +minimumLatitudeSH = -50 +referenceLongitudeSH = 180 + +[regions] +## options related to ocean regions used in several analysis modules + +# list of region names (needs to be in the same order as region indices in +# time-series stats) +regions = ['arctic', 'equatorial', 'so', 'nino3', 'nino4', 'nino3.4', 'global'] +# list of plot titles (needs to be in the same order as region indices in +# time-series stats) +plotTitles = ['Arctic', 'Equatorial (15S-15N)', 'Southern Ocean', 'Nino 3', 'Nino 4', 'Nino 3.4', 'Global Ocean'] + +[plot] +## options related to plotting that are the defaults across all analysis modules + +# set to true if you want plots to be displayed (one by one) to the screen in +# addition to being written out to png files +# Note: displayToScreen = True seems to hang on Edison on large data sets, +# so suggested use is just for debugging either locally or with small data sets +displayToScreen = False + +# font size on axes +axisFontSize = 16 +# title font properties +titleFontSize = 20 +titleFontColor = black +titleFontWeight = normal diff --git a/configs/olcf/config.20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison b/configs/olcf/config.20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison new file mode 100644 index 000000000..cf0fe444a --- /dev/null +++ b/configs/olcf/config.20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison @@ -0,0 
+1,370 @@ +[runs] +## options related to the run to be analyzed and reference runs to be +## compared against + +# mainRunName is a name that identifies the simulation being analyzed. +mainRunName = 20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison +# referenceRunName is the name of a reference run to compare against (or None +# to turn off comparison with a reference, e.g. if no reference case is +# available) +referenceRunName = None +# preprocessedReferenceRunName is the name of a reference run that has been +# preprocessed to compare against (or None to turn off comparison). Reference +# runs of this type would have preprocessed results because they were not +# performed with MPAS components (so they cannot be easily ingested by +# MPAS-Analysis) +preprocessedReferenceRunName = B1850C5_ne30_v0.4 + +[input] +## options related to reading in the results to be analyzed + +# directory containing model results +baseDirectory = /lustre/atlas1/cli115/proj-shared/mbranst/20161117.beta0.A_WCYCL1850S.ne30_oEC_ICG.edison/run +# names of namelist and streams files. If not in baseDirectory, give full path +oceanNamelistFileName = mpas-o_in +oceanStreamsFileName = streams.ocean +seaIceNamelistFileName = mpas-cice_in +seaIceStreamsFileName = streams.cice + +[output] +## options related to writing out plots, intermediate cached data sets, logs, +## etc. + +# directory where analysis should be written +baseDirectory = /dir/to/analysis/output + +# subdirectories within baseDirectory for analysis output +scratchSubdirectory = scratch +plotsSubdirectory = plots +logsSubdirectory = logs +climatologySubdirectory = clim + +# a list of analyses to generate. Valid names are: +# 'timeSeriesOHC', 'timeSeriesSST', 'regriddedSST', +# 'regriddedSSS', 'regriddedMLD', 'timeSeriesSeaIceAreaVol', +# 'regriddedSeaIceConcThick' +# the following shortcuts exist: +# 'all' -- all analyses will be run +# 'all_timeSeries' -- all time-series analyses will be run +# 'all_regriddedHorizontal' -- all analyses involving regridded horizontal +# fields will be run +# 'all_ocean' -- all ocean analyses will be run +# 'all_seaIce' -- all sea-ice analyses will be run +# 'no_timeSeriesOHC' -- skip 'timeSeriesOHC' (and similarly with the +# other analyses). +# 'no_ocean', 'no_timeSeries', etc. -- in analogy to 'all_*', skip the +# given category of analysis +# an equivalent syntax can be used on the command line to override this +# option: +# ./run_analysis.py config.analysis --generate \ +# all,no_ocean,all_timeSeries +generate = ['all'] + +# alternative examples that would perform all analysis except +# 'timeSeriesOHC' +#generate = ['timeSeriesSST', 'all_regriddedHorizontal', 'all_seaIce'] +#generate = ['all', 'no_timeSeriesOHC'] +# Each subsequent list entry can be used to alter previous list entries. For +# example, the following would produce all analyses except regriddedSST, +# regriddedSSS and regriddedMLD (albeit not in a very intuitive way): +#generate = ['all', 'no_ocean', 'all_timeSeries'] + +[climatology] +## options related to producing climatologies, typically to compare against +## observations and previous runs + +# the first year over which to average climatalogies +startYear = 131 +# the last year over which to average climatalogies +endYear = 140 + +[timeSeries] +## options related to producing time series plots, often to compare against +## observations and previous runs + +# start and end years for timeseries analysis. 
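As noted in the comments above, the same syntax can be supplied on the command line as a comma-separated `--generate` flag that overrides the `generate` option in the config file. The rough sketch below shows how such a flag could be split back into a list; the argument handling here is an assumption for illustration, not the actual `run_analysis.py` code.

```
# Hypothetical parsing of a comma-separated --generate override
import argparse

parser = argparse.ArgumentParser(description='Run MPAS-Analysis tasks')
parser.add_argument('configFile', help='config file, e.g. config.myrun')
parser.add_argument('--generate', default=None,
                    help='comma-separated list overriding [output]/generate, '
                         'e.g. all,no_ocean,all_timeSeries')
args = parser.parse_args(['config.myrun',
                          '--generate', 'all,no_ocean,all_timeSeries'])

if args.generate is not None:
    generate = args.generate.split(',')   # command-line override wins
else:
    generate = ['all']                    # would normally come from the config
print(generate)
```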
Using out-of-bounds values +# like start_year = 1 and end_year = 9999 will be clipped to the valid range +# of years, and is a good way of insuring that all values are used. +startYear = 131 +endYear = 140 + +[oceanObservations] +## options related to ocean observations with which the results will be compared + +# directory where ocean observations are stored +baseDirectory = /lustre/atlas/proj-shared/cli115/observations +sstSubdirectory = SST +sssSubdirectory = SSS +mldSubdirectory = MLD + +[oceanReference] +## options related to ocean reference run with which the results will be +## compared + +# directory where ocean reference simulation results are stored +baseDirectory = /dir/to/ocean/reference + +[oceanPreprocessedReference] +## options related to preprocessed ocean reference run with which the results +## will be compared (e.g. a POP, CESM or ACME v0 run) + +# directory where ocean reference simulation results are stored +baseDirectory = /lustre/atlas/proj-shared/cli115/milena/ACMEv0_lowres/B1850C5_ne30_v0.4/ocn/postprocessing + +[seaIceObservations] +## options related to sea ice observations with which the results will be +## compared + +# directory where sea ice observations are stored +baseDirectory = /lustre/atlas/proj-shared/cli115/observations/SeaIce +areaNH = IceArea_timeseries/iceAreaNH_climo.nc +areaSH = IceArea_timeseries/iceAreaSH_climo.nc +volNH = PIOMAS/PIOMASvolume_monthly_climo.nc +volSH = none + +[seaIceReference] +## options related to sea ice reference run with which the results will be +## compared + +# directory where sea ice reference simulation results are stored +baseDirectory = /dir/to/seaice/reference + +[seaIcePreprocessedReference] +## options related to preprocessed sea ice reference run with which the results +## will be compared (e.g. a CICE, CESM or ACME v0 run) + +# directory where ocean reference simulation results are stored +baseDirectory = /lustre/atlas/proj-shared/cli115/milena/ACMEv0_lowres/B1850C5_ne30_v0.4/ice/postprocessing + +[time] +## options related to dates and times for all runs (main, reference and +## preprocessed) + +# the offset in years to be added to simulation years to prevent xarray +# calendar problems. (Hopefully, this will be removed soon.) +yearOffset = 1849 + +[remapping] +## options related to horizontal remapping + +# paths to mapping files (which will hopefully be eliminated soon) +mpasRemapFile = /lustre/atlas/proj-shared/cli115/milena/mapping_files/map_oEC60to30_TO_0.5x0.5degree_blin.160412.nc + +[timeSeriesOHC] +## options related to plotting time series of ocean heat content (OHC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = False +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSST] +## options related to plotting time series of sea surface temperature (SST) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? 
+compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesNino34] +## options related to plotting time series of the El Nino 3.4 index + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMHT] +## options related to plotting time series of meridional heat transport (MHT) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesMOC] +## options related to plotting time series of meridional overturning +## circulation (MOC) + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? +compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 + +[timeSeriesSeaIceAreaVol] +## options related to plotting time series of sea ice area and volume + +## compare to output from another model run? +#compareWithModel = True +# compare to observations? 
+compareWithObservations = True +# list of region indices to plot from the region list in [regions] below +regionIndicesToPlot = [6] +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 1 +# title font properties +titleFontSize = 18 + +[regriddedSST] +## options related to plotting horizontally regridded sea surface temperature +## (SST) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-5, -3, -2, -1, 0, 1, 2, 3, 5] + +# alternative colormap/index suggestions +#resultColormap = viridis +#differenceColormap = RdBu_r +#differenceColormapIndices = [0, 40, 80, 127, 170, 210, 255] +#differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSSS] +## options related to plotting horizontally regridded sea surface salinity +## (SSS) against reference model results and observations + +# colormap for model/observations +resultColormap = RdYlBu_r +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [28, 29, 30, 31, 32, 33, 34, 35, 36, 38] + +# colormap for differences +differenceColormap = coolwarm +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-3, -2, -1, -0.5, 0.5, 1, 2, 3] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedMLD] +## options related to plotting horizontally regridded mixed layer depth +## (MLD) against reference model results and observations + +# colormap for model/observations +resultColormap = viridis +# indices into resultColormap for contour color +resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +resultContourValues = [0, 20, 40, 60, 80, 100, 150, 200, 400, 800] + +###### Xylar's note: I think something is off here. 
I think there should +###### be one more contour value than there are indices but there are 2 in MLD +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 120, 140, 170, 210, 255] +# colormap levels/values for contour boundaries +differenceContourValues = [-175, -125, -75, -25, -10, 10, 25, 75, 125, 175] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) +comparisonTimes = ['JFM', 'JAS', 'ANN'] + +[regriddedSeaIceConcThick] +## options related to plotting horizontally regridded sea ice concentration +## and thickness against reference model results and observations + +# colormap for model/observations +resultColormap = inferno +# indices into resultColormap for contour color +resultColormapIndices = [20, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +resultConcWinterContourValues = [0.15, 0.4, 0.7, 0.9, 0.94, 0.96, 0.98, 0.99, 1] +resultConcSummerContourValues = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1] +# thickness in the northern and southern hemispheres +resultThickNHContourValues = [0, 0.25, 0.5, 1, 1.5, 2, 2.5, 3, 3.5] +resultThickSHContourValues = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5] + +# colormap for differences +differenceColormap = RdBu_r +# indices into differenceColormap for contour color +differenceColormapIndices = [0, 40, 80, 127, 127, 170, 210, 255] +# colormap levels/values for contour boundaries for: +# concentration in winter and summer +differenceConcWinterContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +differenceConcSummerContourValues = [-0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8] +# thickness in the northern and southern hemispheres +differenceThickNHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] +differenceThickSHContourValues = [-2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5] + +# reference lat/lon for sea ice plots in the northern hemisphere +minimumLatitudeNH = 50 +referenceLongitudeNH = 0 +# reference lat/lon for sea ice plots in the southern hemisphere +minimumLatitudeSH = -50 +referenceLongitudeSH = 180 + +[regions] +## options related to ocean regions used in several analysis modules + +# list of region names (needs to be in the same order as region indices in +# time-series stats) +regions = ['arctic', 'equatorial', 'so', 'nino3', 'nino4', 'nino3.4', 'global'] +# list of plot titles (needs to be in the same order as region indices in +# time-series stats) +plotTitles = ['Arctic', 'Equatorial (15S-15N)', 'Southern Ocean', 'Nino 3', 'Nino 4', 'Nino 3.4', 'Global Ocean'] + +[plot] +## options related to plotting that are the defaults across all analysis modules + +# set to true if you want plots to be displayed (one by one) to the screen in +# addition to being written out to png files +# Note: displayToScreen = True seems to hang on Edison on large data sets, +# so suggested use is just for debugging either locally or with small data sets +displayToScreen = False + +# font size on axes +axisFontSize = 16 +# title font properties +titleFontSize = 20 +titleFontColor = black +titleFontWeight = normal diff --git a/mpas_analysis/ocean/ocean_modelvsobs.py b/mpas_analysis/ocean/ocean_modelvsobs.py index 1ce82406d..750d709f8 100644 --- a/mpas_analysis/ocean/ocean_modelvsobs.py +++ b/mpas_analysis/ocean/ocean_modelvsobs.py @@ -1,10 +1,11 @@ #!/usr/bin/env python """ General comparison of 2-d 
model fields against data. Currently only supports -mixed layer depths (mld) and sea surface temperature (sst) +sea surface temperature (sst), sea surface salinity (sss) and mixed layer +depth (mld) Author: Luke Van Roekel, Milena Veneziani, Xylar Asay-Davis -Last Modified: 12/06/2016 +Last Modified: 02/02/2017 """ import matplotlib.pyplot as plt @@ -22,6 +23,7 @@ from ..shared.constants import constants from ..shared.io import StreamsFile +from ..shared.io.utility import buildConfigFullPath def ocn_modelvsobs(config, field, streamMap=None, variableMap=None): @@ -32,8 +34,8 @@ def ocn_modelvsobs(config, field, streamMap=None, variableMap=None): config is an instance of MpasAnalysisConfigParser containing configuration options. - field is the name of a field to be analyize (currently one of 'mld' or - 'sst') + field is the name of a field to be analyize (currently one of 'sst', 'sss' + or 'mld') If present, streamMap is a dictionary of MPAS-O stream names that map to their mpas_analysis counterparts. @@ -42,44 +44,46 @@ def ocn_modelvsobs(config, field, streamMap=None, variableMap=None): to their mpas_analysis counterparts. Authors: Luke Van Roekel, Milena Veneziani, Xylar Asay-Davis - Modified: 12/08/2016 + Last Modified: 02/02/2017 """ # read parameters from config file - indir = config.get('paths', 'archive_dir_ocn') + inDirectory = config.get('input', 'baseDirectory') - streams_filename = config.get('input', 'ocean_streams_filename') - streams = StreamsFile(streams_filename, streamsdir=indir) + streamsFileName = config.get('input', 'oceanStreamsFileName') + streams = StreamsFile(streamsFileName, streamsdir=inDirectory) # get a list of timeSeriesStats output files from the streams file, # reading only those that are between the start and end dates - startDate = config.get('time', 'climo_start_date') - endDate = config.get('time', 'climo_end_date') + startDate = config.get('climatology', 'startDate') + endDate = config.get('climatology', 'endDate') streamName = streams.find_stream(streamMap['timeSeriesStats']) - infiles = streams.readpath(streamName, startDate=startDate, - endDate=endDate) - print 'Reading files {} through {}'.format(infiles[0], infiles[-1]) + inputFiles = streams.readpath(streamName, startDate=startDate, + endDate=endDate) + print 'Reading files {} through {}'.format(inputFiles[0], inputFiles[-1]) - plots_dir = config.get('paths', 'plots_dir') - obsdir = config.get('paths', 'obs_' + field + 'dir') - casename = config.get('case', 'casename') + plotsDirectory = buildConfigFullPath(config, 'output', 'plotsSubdirectory') + observationsDirectory = buildConfigFullPath(config, 'oceanObservations', + '{}Subdirectory'.format(field)) + mainRunName = config.get('runs', 'mainRunName') try: - inputfile = streams.readpath('restart')[0] + restartFile = streams.readpath('restart')[0] except ValueError: raise IOError('No MPAS-O restart file found: need at least one ' 'restart file for ocn_modelvsobs calculation') - climo_yr1 = config.getint('time', 'climo_yr1') - climo_yr2 = config.getint('time', 'climo_yr2') - yr_offset = config.getint('time', 'yr_offset') + startYear = config.getint('climatology', 'startYear') + endYear = config.getint('climatology', 'endYear') + yearOffset = config.getint('time', 'yearOffset') - outputTimes = config.getExpression(field + '_modelvsobs', - 'comparisonTimes') + sectionName = 'regridded{}'.format(field.upper()) + outputTimes = config.getExpression(sectionName, 'comparisonTimes') - f = netcdf_dataset(inputfile, mode='r') - lonCell = 
f.variables["lonCell"][:] - latCell = f.variables["latCell"][:] + ncFile = netcdf_dataset(restartFile, mode='r') + lonCell = ncFile.variables["lonCell"][:] + latCell = ncFile.variables["latCell"][:] + ncFile.close() varList = [field] @@ -88,112 +92,120 @@ def ocn_modelvsobs(config, field, streamMap=None, variableMap=None): selvals = None # Load MLD observational data - obs_filename = "{}/holtetalley_mld_climatology.nc".format(obsdir) - dsData = xr.open_mfdataset(obs_filename) + obsFileName = "{}/holtetalley_mld_climatology.nc".format( + observationsDirectory) + dsObs = xr.open_mfdataset(obsFileName) # Increment month value to be consistent with the model output - dsData.iMONTH.values += 1 + dsObs.iMONTH.values += 1 # Rename the time dimension to be consistent with the SST dataset - dsData.rename({'month': 'calmonth'}, inplace=True) - dsData.rename({'iMONTH': 'month'}, inplace=True) - dsData.coords['month'] = dsData['calmonth'] + dsObs.rename({'month': 'calmonth'}, inplace=True) + dsObs.rename({'iMONTH': 'month'}, inplace=True) + dsObs.coords['month'] = dsObs['calmonth'] obsFieldName = 'mld_dt_mean' # Reorder dataset for consistence - dsData = dsData.transpose('month', 'iLON', 'iLAT') + dsObs = dsObs.transpose('month', 'iLON', 'iLAT') # Set appropriate MLD figure labels - obsTitleLabel = "Observations (HolteTalley density threshold MLD)" - fileOutLabel = "mldHolteTalleyARGO" + observationTitleLabel = \ + "Observations (HolteTalley density threshold MLD)" + outFileLabel = "mldHolteTalleyARGO" unitsLabel = 'm' elif field == 'sst': selvals = {'nVertLevels': 0} - obs_filename = \ - "{}/MODEL.SST.HAD187001-198110.OI198111-201203.nc".format(obsdir) - dsData = xr.open_mfdataset(obs_filename) + obsFileName = \ + "{}/MODEL.SST.HAD187001-198110.OI198111-201203.nc".format( + observationsDirectory) + dsObs = xr.open_mfdataset(obsFileName) # Select years for averaging (pre-industrial or present-day) # This seems fragile as definitions can change - if yr_offset < 1900: - time_start = datetime.datetime(1870, 1, 1) - time_end = datetime.datetime(1900, 12, 31) - preIndustrial_txt = "pre-industrial 1870-1900" + if yearOffset < 1900: + timeStart = datetime.datetime(1870, 1, 1) + timeEnd = datetime.datetime(1900, 12, 31) + preindustrialText = "pre-industrial 1870-1900" else: - time_start = datetime.datetime(1990, 1, 1) - time_end = datetime.datetime(2011, 12, 31) - preIndustrial_txt = "present-day 1990-2011" + timeStart = datetime.datetime(1990, 1, 1) + timeEnd = datetime.datetime(2011, 12, 31) + preindustrialText = "present-day 1990-2011" - ds_tslice = dsData.sel(time=slice(time_start, time_end)) - monthly_clim_data = ds_tslice.groupby('time.month').mean('time') + dsTimeSlice = dsObs.sel(time=slice(timeStart, timeEnd)) + monthlyClimatology = dsTimeSlice.groupby('time.month').mean('time') # Rename the observation data for code compactness - dsData = monthly_clim_data.transpose('month', 'lon', 'lat') + dsObs = monthlyClimatology.transpose('month', 'lon', 'lat') obsFieldName = 'SST' # Set appropriate figure labels for SST - obsTitleLabel = \ - "Observations (Hadley/OI, {})".format(preIndustrial_txt) - fileOutLabel = "sstHADOI" + observationTitleLabel = \ + "Observations (Hadley/OI, {})".format(preindustrialText) + outFileLabel = "sstHADOI" unitsLabel = r'$^o$C' elif field == 'sss': selvals = {'nVertLevels': 0} - obs_filename = "{}/Aquarius_V3_SSS_Monthly.nc".format(obsdir) - dsData = xr.open_mfdataset(obs_filename) + obsFileName = "{}/Aquarius_V3_SSS_Monthly.nc".format( + observationsDirectory) + dsObs = 
xr.open_mfdataset(obsFileName) - time_start = datetime.datetime(2011, 8, 1) - time_end = datetime.datetime(2014, 12, 31) + timeStart = datetime.datetime(2011, 8, 1) + timeEnd = datetime.datetime(2014, 12, 31) - ds_tslice = dsData.sel(time=slice(time_start, time_end)) + dsTimeSlice = dsObs.sel(time=slice(timeStart, timeEnd)) # The following line converts from DASK to numpy to supress an odd # warning that doesn't influence the figure output - ds_tslice.SSS.values + dsTimeSlice.SSS.values - monthly_clim_data = ds_tslice.groupby('time.month').mean('time') + monthlyClimatology = dsTimeSlice.groupby('time.month').mean('time') # Rename the observation data for code compactness - dsData = monthly_clim_data.transpose('month', 'lon', 'lat') + dsObs = monthlyClimatology.transpose('month', 'lon', 'lat') obsFieldName = 'SSS' # Set appropriate figure labels for SSS - preIndustrial_txt = "2011-2014" + preindustrialText = "2011-2014" - obsTitleLabel = "Observations (Aquarius, {})".format(preIndustrial_txt) - fileOutLabel = 'sssAquarius' + observationTitleLabel = "Observations (Aquarius, {})".format( + preindustrialText) + outFileLabel = 'sssAquarius' unitsLabel = 'PSU' ds = xr.open_mfdataset( - infiles, - preprocess=lambda x: preprocess_mpas(x, yearoffset=yr_offset, + inputFiles, + preprocess=lambda x: preprocess_mpas(x, yearoffset=yearOffset, timestr='Time', onlyvars=varList, selvals=selvals, varmap=variableMap)) ds = remove_repeated_time_index(ds) - time_start = datetime.datetime(yr_offset+climo_yr1, 1, 1) - time_end = datetime.datetime(yr_offset+climo_yr2, 12, 31) - ds_tslice = ds.sel(Time=slice(time_start, time_end)) - monthly_clim = ds_tslice.groupby('Time.month').mean('Time') + timeStart = datetime.datetime(yearOffset+startYear, 1, 1) + timeEnd = datetime.datetime(yearOffset+endYear, 12, 31) + dsTimeSlice = ds.sel(Time=slice(timeStart, timeEnd)) + monthlyClimatology = dsTimeSlice.groupby('Time.month').mean('Time') - latData, lonData = np.meshgrid(dsData.lat.values, dsData.lon.values) + latData, lonData = np.meshgrid(dsObs.lat.values, + dsObs.lon.values) latData = latData.flatten() lonData = lonData.flatten() - daysarray = np.ones((12, dsData[obsFieldName].values.shape[1], - dsData[obsFieldName].values.shape[2])) + daysarray = np.ones((12, + dsObs[obsFieldName].values.shape[1], + dsObs[obsFieldName].values.shape[2])) - for i, dval in enumerate(constants.dinmonth): - daysarray[i, :, :] = dval - inds = np.where(np.isnan(dsData[obsFieldName][i, :, :].values)) - daysarray[i, inds[0], inds[1]] = np.NaN + for monthIndex, dval in enumerate(constants.dinmonth): + daysarray[monthIndex, :, :] = dval + inds = np.where(np.isnan( + dsObs[obsFieldName][monthIndex, :, :].values)) + daysarray[monthIndex, inds[0], inds[1]] = np.NaN # initialize interpolation variables d2, inds2, lonTarg, latTarg = init_tree(np.rad2deg(lonCell), @@ -219,63 +231,72 @@ def ocn_modelvsobs(config, field, streamMap=None, variableMap=None): bias = np.zeros((len(outputTimes), nLon, nLat)) # Interpolate and compute biases - for i, timestring in enumerate(outputTimes): - monthsvalue = constants.monthdictionary[timestring] + for timeIndex, timestring in enumerate(outputTimes): + monthsValue = constants.monthdictionary[timestring] - if isinstance(monthsvalue, (int, long)): - modeldata = monthly_clim.sel(month=monthsvalue)[field].values - obsdata = dsData.sel(month=monthsvalue)[obsFieldName].values + if isinstance(monthsValue, (int, long)): + modelData = monthlyClimatology.sel(month=monthsValue)[field].values + obsData = dsObs.sel( + 
month=monthsValue)[obsFieldName].values else: - modeldata = (np.sum( - constants.dinmonth[monthsvalue-1] * - monthly_clim.sel(month=monthsvalue)[field].values.T, axis=1) / - np.sum(constants.dinmonth[monthsvalue-1])) - obsdata = (np.nansum( - daysarray[monthsvalue-1, :, :] * - dsData.sel(month=monthsvalue)[obsFieldName].values, axis=0) / - np.nansum(daysarray[monthsvalue-1, :, :], axis=0)) - - modelOutput[i, :, :] = interp_fields(modeldata, d2, inds2, lonTarg) - observations[i, :, :] = interp_fields(obsdata.flatten(), d, inds, - lonTargD) - - for i in range(len(outputTimes)): - bias[i, :, :] = modelOutput[i, :, :] - observations[i, :, :] - - clevsModelObs = config.getExpression(field + '_modelvsobs', - 'clevsModelObs') - cmap = plt.get_cmap(config.get(field + '_modelvsobs', - 'cmapModelObs')) - cmapIndices = config.getExpression(field + '_modelvsobs', - 'cmapIndicesModelObs') - cmapModelObs = cols.ListedColormap(cmap(cmapIndices), "cmapModelObs") - clevsDiff = config.getExpression(field + '_modelvsobs', - 'clevsDiff') - cmap = plt.get_cmap(config.get(field + '_modelvsobs', 'cmapDiff')) - cmapIndices = config.getExpression(field + '_modelvsobs', - 'cmapIndicesDiff') - cmapDiff = cols.ListedColormap(cmap(cmapIndices), "cmapDiff") - - for i in range(len(outputTimes)): - fileout = "{}/{}_{}_{}_years{:04d}-{:04d}.png".format( - plots_dir, fileOutLabel, casename, outputTimes[i], climo_yr1, - climo_yr2) + modelData = (np.sum( + constants.dinmonth[monthsValue-1] * + monthlyClimatology.sel(month=monthsValue)[field].values.T, + axis=1) / + np.sum(constants.dinmonth[monthsValue-1])) + obsData = \ + (np.nansum( + daysarray[monthsValue-1, :, :] * + dsObs.sel(month=monthsValue)[obsFieldName].values, + axis=0) / + np.nansum(daysarray[monthsValue-1, :, :], axis=0)) + + modelOutput[timeIndex, :, :] = interp_fields(modelData, d2, inds2, + lonTarg) + observations[timeIndex, :, :] = interp_fields(obsData.flatten(), d, + inds, lonTargD) + + for timeIndex in range(len(outputTimes)): + bias[timeIndex, :, :] = (modelOutput[timeIndex, :, :] - + observations[timeIndex, :, :]) + + resultContourValues = config.getExpression(sectionName, + 'resultContourValues') + resultColormap = plt.get_cmap(config.get(sectionName, 'resultColormap')) + resultColormapIndices = config.getExpression(sectionName, + 'resultColormapIndices') + resultColormap = cols.ListedColormap(resultColormap(resultColormapIndices), + "resultColormap") + differenceContourValues = config.getExpression(sectionName, + 'differenceContourValues') + differenceColormap = plt.get_cmap(config.get(sectionName, + 'differenceColormap')) + differenceColormapIndices = config.getExpression( + sectionName, 'differenceColormapIndices') + differenceColormap = cols.ListedColormap( + differenceColormap(differenceColormapIndices), + "differenceColormap") + + for timeIndex in range(len(outputTimes)): + outFileName = "{}/{}_{}_{}_years{:04d}-{:04d}.png".format( + plotsDirectory, outFileLabel, mainRunName, + outputTimes[timeIndex], startYear, endYear) title = "{} ({}, years {:04d}-{:04d})".format( - field.upper(), outputTimes[i], climo_yr1, climo_yr2) + field.upper(), outputTimes[timeIndex], startYear, endYear) plot_global_comparison(config, lonTarg, latTarg, - modelOutput[i, :, :], - observations[i, :, :], - bias[i, :, :], - cmapModelObs, - clevsModelObs, - cmapDiff, - clevsDiff, - fileout=fileout, + modelOutput[timeIndex, :, :], + observations[timeIndex, :, :], + bias[timeIndex, :, :], + resultColormap, + resultContourValues, + differenceColormap, + differenceContourValues, 
+ fileout=outFileName, title=title, - modelTitle="{}".format(casename), - obsTitle=obsTitleLabel, + modelTitle="{}".format(mainRunName), + obsTitle=observationTitleLabel, diffTitle="Model-Observations", cbarlabel=unitsLabel) diff --git a/mpas_analysis/ocean/ohc_timeseries.py b/mpas_analysis/ocean/ohc_timeseries.py index b6c78634d..f4ecbe69b 100644 --- a/mpas_analysis/ocean/ohc_timeseries.py +++ b/mpas_analysis/ocean/ohc_timeseries.py @@ -11,6 +11,7 @@ from ..shared.plot.plotting import timeseries_analysis_plot from ..shared.io import NameList, StreamsFile +from ..shared.io.utility import buildConfigFullPath from ..shared.timekeeping.Date import Date @@ -31,65 +32,72 @@ def ohc_timeseries(config, streamMap=None, variableMap=None): to their mpas_analysis counterparts. Author: Xylar Asay-Davis, Milena Veneziani - Last Modified: 01/07/2017 + Last Modified: 02/02/2017 """ # read parameters from config file - casename = config.get('case', 'casename') - ref_casename_v0 = config.get('case', 'ref_casename_v0') - indir_v0data = config.get('paths', 'ref_archive_v0_ocndir') + mainRunName = config.get('runs', 'mainRunName') + preprocessedReferenceRunName = config.get('runs', + 'preprocessedReferenceRunName') + preprocessedInputDirectory = config.get('oceanPreprocessedReference', + 'baseDirectory') - compare_with_obs = config.getboolean('ohc_timeseries', 'compare_with_obs') + compareWithObservations = config.getboolean('timeSeriesOHC', + 'compareWithObservations') - plots_dir = config.get('paths', 'plots_dir') + plotsDirectory = buildConfigFullPath(config, 'output', 'plotsSubdirectory') - yr_offset = config.getint('time', 'yr_offset') + yearOffset = config.getint('time', 'yearOffset') - N_movavg = config.getint('ohc_timeseries', 'N_movavg') + movingAveragePoints = config.getint('timeSeriesOHC', 'movingAveragePoints') regions = config.getExpression('regions', 'regions') - plot_titles = config.getExpression('regions', 'plot_titles') - iregions = config.getExpression('ohc_timeseries', 'regionIndicesToPlot') + plotTitles = config.getExpression('regions', 'plotTitles') + regionIndicesToPlot = config.getExpression('timeSeriesOHC', + 'regionIndicesToPlot') - indir = config.get('paths', 'archive_dir_ocn') + inDirectory = config.get('input', 'baseDirectory') - namelist_filename = config.get('input', 'ocean_namelist_filename') - namelist = NameList(namelist_filename, path=indir) + namelistFileName = config.get('input', 'oceanNamelistFileName') + namelist = NameList(namelistFileName, path=inDirectory) - streams_filename = config.get('input', 'ocean_streams_filename') - streams = StreamsFile(streams_filename, streamsdir=indir) + streamsFileName = config.get('input', 'oceanStreamsFileName') + streams = StreamsFile(streamsFileName, streamsdir=inDirectory) # Note: input file, not a mesh file because we need dycore specific fields # such as refBottomDepth and namelist fields such as config_density0, as - # well as simulationStartTime, that are not guaranteed to be in the mesh file. + # well as simulationStartTime, that are not guaranteed to be in the mesh + # file. 
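The regridded* config sections pair a list of colormap indices with a list of contour values, and the plotting code above builds a ListedColormap from those indices. Filled contour plots typically need one more boundary than colors, which is the point of the note repeated in the configs. The snippet below checks that pairing for the [regriddedSST] result settings; treating the contour values as a BoundaryNorm is a simplification of whatever plot_global_comparison does internally.

```
# N colors fill the gaps between N+1 contour boundaries, so *ContourValues
# should normally have one more entry than *ColormapIndices.
import matplotlib.colors as cols
import matplotlib.pyplot as plt

resultColormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255]   # 9 colors
resultContourValues = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32]         # 10 edges

baseColormap = plt.get_cmap('RdYlBu_r')
resultColormap = cols.ListedColormap(baseColormap(resultColormapIndices),
                                     'resultColormap')
norm = cols.BoundaryNorm(resultContourValues, resultColormap.N)

# 9 == 9: the SST settings are consistent; the MLD difference settings noted
# above have two extra contour values rather than one
print(resultColormap.N, len(resultContourValues) - 1)
```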
try: - inputfile = streams.readpath('restart')[0] + restartFile = streams.readpath('restart')[0] except ValueError: - raise IOError('No MPAS-O restart file found: need at least one restart file for OHC calculation') + raise IOError('No MPAS-O restart file found: need at least one ' + 'restart file for OHC calculation') # get a list of timeSeriesStats output files from the streams file, # reading only those that are between the start and end dates - startDate = config.get('time', 'timeseries_start_date') - endDate = config.get('time', 'timeseries_end_date') + startDate = config.get('timeSeries', 'startDate') + endDate = config.get('timeSeries', 'endDate') streamName = streams.find_stream(streamMap['timeSeriesStats']) - infiles = streams.readpath(streamName, startDate=startDate, + inFiles = streams.readpath(streamName, startDate=startDate, endDate=endDate) - print 'Reading files {} through {}'.format(infiles[0], infiles[-1]) + print 'Reading files {} through {}'.format(inFiles[0], inFiles[-1]) # Define/read in general variables print ' Read in depth and compute specific depth indexes...' - f = netcdf_dataset(inputfile, mode='r') + ncFile = netcdf_dataset(restartFile, mode='r') # reference depth [m] - depth = f.variables['refBottomDepth'][:] + depth = ncFile.variables['refBottomDepth'][:] # simulation start time - simStartTime = netCDF4.chartostring(f.variables['simulationStartTime'][:]) - simStartTime = str(simStartTime) - f.close() + simulationStartTime = netCDF4.chartostring( + ncFile.variables['simulationStartTime'][:]) + simulationStartTime = str(simulationStartTime) + ncFile.close() # specific heat [J/(kg*degC)] cp = namelist.getfloat('config_specific_heat_sea_water') # [kg/m3] rho = namelist.getfloat('config_density0') - fac = 1e-22*rho*cp + factor = 1e-22*rho*cp k700m = np.where(depth > 700.)[0][0] - 1 k2000m = np.where(depth > 2000.)[0][0] - 1 @@ -98,16 +106,16 @@ def ohc_timeseries(config, streamMap=None, variableMap=None): # Load data print ' Load ocean data...' 
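In ohc_timeseries.py, `factor = 1e-22*rho*cp` converts the layer sums of volume times temperature anomaly into heat content in units of 10^22 J (the y-axis label used later), while k700m and k2000m pick the deepest refBottomDepth level that is still shallower than 700 m and 2000 m. The values below are representative assumptions for the namelist options, used only to make the arithmetic concrete.

```
# Back-of-the-envelope check of the OHC scaling and depth-index selection
import numpy as np

rho = 1026.0   # kg m^-3, assumed value of config_density0
cp = 3996.0    # J kg^-1 K^-1, assumed config_specific_heat_sea_water
factor = 1e-22 * rho * cp   # scales sum(volume * dT) to units of 10^22 J

# illustrative layer bottom depths, not an actual MPAS-O refBottomDepth array
depth = np.array([50., 150., 300., 500., 800., 1500., 2500., 4000.])
k700m = np.where(depth > 700.)[0][0] - 1    # last level shallower than 700 m
k2000m = np.where(depth > 2000.)[0][0] - 1  # last level shallower than 2000 m

# a 1 degC anomaly over 1e17 m^3 of ocean comes out to roughly 41 x 10^22 J
print(factor * 1.0 * 1e17, depth[k700m], depth[k2000m])
```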
- varList = ['avgLayerTemperature', - 'sumLayerMaskValue', - 'avgLayerArea', - 'avgLayerThickness'] + variableList = ['avgLayerTemperature', + 'sumLayerMaskValue', + 'avgLayerArea', + 'avgLayerThickness'] ds = xr.open_mfdataset( - infiles, + inFiles, preprocess=lambda x: preprocess_mpas(x, - yearoffset=yr_offset, + yearoffset=yearOffset, timestr='Time', - onlyvars=varList, + onlyvars=variableList, varmap=variableMap)) ds = remove_repeated_time_index(ds) @@ -115,118 +123,132 @@ def ohc_timeseries(config, streamMap=None, variableMap=None): # convert the start and end dates to datetime objects using # the Date class, which ensures the results are within the # supported range - time_start = Date(startDate).to_datetime(yr_offset) - time_end = Date(endDate).to_datetime(yr_offset) + timeStart = Date(startDate).to_datetime(yearOffset) + timeEnd = Date(endDate).to_datetime(yearOffset) # select only the data in the specified range of years - ds = ds.sel(Time=slice(time_start, time_end)) + ds = ds.sel(Time=slice(timeStart, timeEnd)) # Select year-1 data and average it (for later computing anomalies) - time_start_yr1 = Date(simStartTime).to_datetime(yr_offset) - if time_start_yr1 < time_start: - startDate_yr1 = simStartTime - endDate_yr1 = startDate_yr1[0:5]+'12-31'+startDate_yr1[10:] - infiles_yr1 = streams.readpath(streamName, startDate=startDate_yr1, - endDate=endDate_yr1) - ds_yr1 = xr.open_mfdataset( - infiles_yr1, - preprocess=lambda x: preprocess_mpas(x, - yearoffset=yr_offset, - timestr='Time', - onlyvars=varList, - varmap=variableMap)) - - ds_yr1 = remove_repeated_time_index(ds_yr1) + timeStartFirstYear = Date(simulationStartTime).to_datetime(yearOffset) + if timeStartFirstYear < timeStart: + startDateFirstYear = simulationStartTime + endDateFirstYear = '{}-12-31_23:59:59'.format(startDateFirstYear[0:4]) + filesFirstYear = streams.readpath(streamName, + startDate=startDateFirstYear, + endDate=endDateFirstYear) + dsFirstYear = xr.open_mfdataset( + filesFirstYear, + preprocess=lambda x: preprocess_mpas(x, + yearoffset=yearOffset, + timestr='Time', + onlyvars=variableList, + varmap=variableMap)) + + dsFirstYear = remove_repeated_time_index(dsFirstYear) else: - time_start = datetime.datetime(time_start.year, 1, 1) - time_end = datetime.datetime(time_start.year, 12, 31) - ds_yr1 = ds.sel(Time=slice(time_start, time_end)) - mean_yr1 = ds_yr1.mean('Time') + timeStart = datetime.datetime(timeStart.year, 1, 1) + timeEnd = datetime.datetime(timeStart.year, 12, 31) + dsFirstYear = ds.sel(Time=slice(timeStart, timeEnd)) + meanFirstYear = dsFirstYear.mean('Time') print ' Compute temperature anomalies...' avgLayerTemperature = ds.avgLayerTemperature - avgLayerTemperature_yr1 = mean_yr1.avgLayerTemperature - - avgLayTemp_anomaly = avgLayerTemperature - avgLayerTemperature_yr1 - - year_start = (pd.to_datetime(ds.Time.min().values)).year - year_end = (pd.to_datetime(ds.Time.max().values)).year - time_start = datetime.datetime(year_start, 1, 1) - time_end = datetime.datetime(year_end, 12, 31) - - if ref_casename_v0 != 'None': - print ' Load in OHC for ACMEv0 case...' 
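The yearOffset bookkeeping above shifts simulation years onto a calendar that pandas/xarray can represent with datetime64[ns] timestamps (roughly the years 1677 to 2262); with yearOffset = 1849, simulation year 1 becomes calendar year 1850. A minimal illustration, using the climatology years from the QU240 example config:

```
# yearOffset maps simulation years onto representable calendar years
import datetime

yearOffset = 1849
startYear, endYear = 1, 3   # e.g. the [climatology] years in the QU240 config

timeStart = datetime.datetime(yearOffset + startYear, 1, 1)
timeEnd = datetime.datetime(yearOffset + endYear, 12, 31)
print('{} to {}'.format(timeStart.date(), timeEnd.date()))
```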
- infiles_v0data = '{}/OHC.{}.year*.nc'.format( - indir_v0data, ref_casename_v0) - ds_v0 = xr.open_mfdataset( - infiles_v0data, - preprocess=lambda x: preprocess_mpas(x, yearoffset=yr_offset)) - ds_v0 = remove_repeated_time_index(ds_v0) - year_end_v0 = (pd.to_datetime(ds_v0.Time.max().values)).year - if year_start <= year_end_v0: - ds_v0_tslice = ds_v0.sel(Time=slice(time_start, time_end)) - else: - print ' Warning: v0 time series lies outside current bounds of v1 time series. Skipping it.' - ref_casename_v0 = 'None' + avgLayerTemperatureFirstYear = meanFirstYear.avgLayerTemperature + + avgLayTemperatureAnomaly = (avgLayerTemperature - + avgLayerTemperatureFirstYear) + + yearStart = (pd.to_datetime(ds.Time.min().values)).year + yearEnd = (pd.to_datetime(ds.Time.max().values)).year + timeStart = datetime.datetime(yearStart, 1, 1) + timeEnd = datetime.datetime(yearEnd, 12, 31) + + if preprocessedReferenceRunName != 'None': + print ' Load in OHC from preprocessed reference run...' + inFilesPreprocesses = '{}/OHC.{}.year*.nc'.format( + preprocessedInputDirectory, preprocessedReferenceRunName) + dsPreprocessed = xr.open_mfdataset( + inFilesPreprocesses, + preprocess=lambda x: preprocess_mpas(x, yearoffset=yearOffset)) + dsPreprocessed = remove_repeated_time_index(dsPreprocessed) + yearEndPreprocessed = \ + (pd.to_datetime(dsPreprocessed.Time.max().values)).year + if yearStart <= yearEndPreprocessed: + dsPreprocessedTimeSlice = dsPreprocessed.sel(Time=slice(timeStart, + timeEnd)) + else: + print ' Warning: Preprocessed time series ends before the ' \ + 'timeSeries startYear and will not be plotted.' + preprocessedReferenceRunName = 'None' sumLayerMaskValue = ds.sumLayerMaskValue avgLayerArea = ds.avgLayerArea avgLayerThickness = ds.avgLayerThickness print ' Compute OHC and make plots...' - for index in range(len(iregions)): - iregion = iregions[index] + for index in range(len(regionIndicesToPlot)): + regionIndex = regionIndicesToPlot[index] # Compute volume of each layer in the region: - layerArea = sumLayerMaskValue[:, iregion, :] * \ - avgLayerArea[:, iregion, :] - layerVolume = layerArea * avgLayerThickness[:, iregion, :] + layerArea = sumLayerMaskValue[:, regionIndex, :] * \ + avgLayerArea[:, regionIndex, :] + layerVolume = layerArea * avgLayerThickness[:, regionIndex, :] # Compute OHC: - ohc = layerVolume * avgLayTemp_anomaly[:, iregion, :] + ohc = layerVolume * avgLayTemperatureAnomaly[:, regionIndex, :] # OHC over 0-bottom depth range: - ohc_tot = ohc.sum('nVertLevels') - ohc_tot = fac*ohc_tot + ohcTotal = ohc.sum('nVertLevels') + ohcTotal = factor*ohcTotal # OHC over 0-700m depth range: - ohc_700m = fac*ohc[:, 0:k700m].sum('nVertLevels') + ohc700m = factor*ohc[:, 0:k700m].sum('nVertLevels') # OHC over 700m-2000m depth range: - ohc_2000m = fac*ohc[:, k700m+1:k2000m].sum('nVertLevels') + ohc2000m = factor*ohc[:, k700m+1:k2000m].sum('nVertLevels') # OHC over 2000m-bottom depth range: - ohc_btm = ohc[:, k2000m+1:kbtm].sum('nVertLevels') - ohc_btm = fac*ohc_btm + ohcBottom = ohc[:, k2000m+1:kbtm].sum('nVertLevels') + ohcBottom = factor*ohcBottom title = 'OHC, {}, 0-bottom (thick-), 0-700m (thin-), 700-2000m (--),' \ - ' 2000m-bottom (-.) 
\n {}'.format(plot_titles[iregion], casename) - - xlabel = 'Time [years]' - ylabel = '[x$10^{22}$ J]' - - if ref_casename_v0 != 'None': - figname = '{}/ohc_{}_{}_{}.png'.format(plots_dir, - regions[iregion], - casename, - ref_casename_v0) - ohc_v0_tot = ds_v0_tslice.ohc_tot - ohc_v0_700m = ds_v0_tslice.ohc_700m - ohc_v0_2000m = ds_v0_tslice.ohc_2000m - ohc_v0_btm = ds_v0_tslice.ohc_btm - title = '{} (r), {} (b)'.format(title, ref_casename_v0) - timeseries_analysis_plot(config, [ohc_tot, ohc_700m, ohc_2000m, - ohc_btm, ohc_v0_tot, ohc_v0_700m, - ohc_v0_2000m, ohc_v0_btm], - N_movavg, title, xlabel, ylabel, figname, + ' 2000m-bottom (-.) \n {}'.format(plotTitles[regionIndex], + mainRunName) + + xLabel = 'Time [years]' + yLabel = '[x$10^{22}$ J]' + + if preprocessedReferenceRunName != 'None': + figureName = '{}/ohc_{}_{}_{}.png'.format( + plotsDirectory, + regions[regionIndex], + mainRunName, + preprocessedReferenceRunName) + ohcPreprocessedTotal = dsPreprocessedTimeSlice.ohc_tot + ohcPreprocessed700m = dsPreprocessedTimeSlice.ohc_700m + ohcPreprocessed2000m = dsPreprocessedTimeSlice.ohc_2000m + ohcPreprocessedBottom = dsPreprocessedTimeSlice.ohc_btm + title = '{} (r), {} (b)'.format(title, + preprocessedReferenceRunName) + timeseries_analysis_plot(config, [ohcTotal, ohc700m, ohc2000m, + ohcBottom, ohcPreprocessedTotal, + ohcPreprocessed700m, + ohcPreprocessed2000m, + ohcPreprocessedBottom], + movingAveragePoints, title, + xLabel, yLabel, figureName, lineStyles=['r-', 'r-', 'r--', 'r-.', 'b-', 'b-', 'b--', 'b-.'], lineWidths=[2, 1, 1.5, 1.5, 2, 1, 1.5, 1.5]) - if not compare_with_obs and ref_casename_v0 == 'None': - figname = '{}/ohc_{}_{}.png'.format(plots_dir, regions[iregion], - casename) - timeseries_analysis_plot(config, [ohc_tot, ohc_700m, ohc_2000m, - ohc_btm], - N_movavg, title, xlabel, ylabel, figname, + if (not compareWithObservations and + preprocessedReferenceRunName == 'None'): + figureName = '{}/ohc_{}_{}.png'.format(plotsDirectory, + regions[regionIndex], + mainRunName) + timeseries_analysis_plot(config, [ohcTotal, ohc700m, ohc2000m, + ohcBottom], + movingAveragePoints, title, + xLabel, yLabel, figureName, lineStyles=['r-', 'r-', 'r--', 'r-.'], lineWidths=[2, 1, 1.5, 1.5]) diff --git a/mpas_analysis/ocean/sst_timeseries.py b/mpas_analysis/ocean/sst_timeseries.py index c59115030..7bc6235a0 100644 --- a/mpas_analysis/ocean/sst_timeseries.py +++ b/mpas_analysis/ocean/sst_timeseries.py @@ -8,6 +8,7 @@ from ..shared.plot.plotting import timeseries_analysis_plot from ..shared.io import StreamsFile +from ..shared.io.utility import buildConfigFullPath from ..shared.timekeeping.Date import Date @@ -27,45 +28,48 @@ def sst_timeseries(config, streamMap=None, variableMap=None): to their mpas_analysis counterparts. Author: Xylar Asay-Davis, Milena Veneziani - Last Modified: 12/05/2016 + Last Modified: 02/02/2017 """ # Define/read in general variables print ' Load SST data...' 
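movingAveragePoints sets how many consecutive samples are averaged before timeseries_analysis_plot draws the curve: 12 turns monthly output into an annual-mean line, while 1 (as in [timeSeriesSeaIceAreaVol]) leaves the series unsmoothed. The sketch below shows the idea with a trailing window; whether the actual plotting routine centers the window is not visible in this diff.

```
# N-point moving average in the spirit of movingAveragePoints
import numpy as np
import pandas as pd

movingAveragePoints = 12   # 12-month window for monthly output
monthly = pd.Series(np.sin(np.linspace(0.0, 12.0 * np.pi, 72)))  # 6 fake years
smoothed = monthly.rolling(window=movingAveragePoints).mean()

# with movingAveragePoints = 1 the rolling mean returns the series unchanged
print(smoothed.dropna().head())
```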
# read parameters from config file - indir = config.get('paths', 'archive_dir_ocn') + inDirectory = config.get('input', 'baseDirectory') - streams_filename = config.get('input', 'ocean_streams_filename') - streams = StreamsFile(streams_filename, streamsdir=indir) + streamsFileName = config.get('input', 'oceanStreamsFileName') + streams = StreamsFile(streamsFileName, streamsdir=inDirectory) # get a list of timeSeriesStats output files from the streams file, # reading only those that are between the start and end dates - startDate = config.get('time', 'timeseries_start_date') - endDate = config.get('time', 'timeseries_end_date') + startDate = config.get('timeSeries', 'startDate') + endDate = config.get('timeSeries', 'endDate') streamName = streams.find_stream(streamMap['timeSeriesStats']) - infiles = streams.readpath(streamName, startDate=startDate, + inFiles = streams.readpath(streamName, startDate=startDate, endDate=endDate) - print 'Reading files {} through {}'.format(infiles[0], infiles[-1]) + print 'Reading files {} through {}'.format(inFiles[0], inFiles[-1]) - casename = config.get('case', 'casename') - ref_casename_v0 = config.get('case', 'ref_casename_v0') - indir_v0data = config.get('paths', 'ref_archive_v0_ocndir') + mainRunName = config.get('runs', 'mainRunName') + preprocessedReferenceRunName = config.get('runs', + 'preprocessedReferenceRunName') + preprocessedInputDirectory = config.get('oceanPreprocessedReference', + 'baseDirectory') - plots_dir = config.get('paths', 'plots_dir') + plotsDirectory = buildConfigFullPath(config, 'output', 'plotsSubdirectory') - yr_offset = config.getint('time', 'yr_offset') + yearOffset = config.getint('time', 'yearOffset') - N_movavg = config.getint('sst_timeseries', 'N_movavg') + movingAveragePoints = config.getint('timeSeriesSST', 'movingAveragePoints') regions = config.getExpression('regions', 'regions') - plot_titles = config.getExpression('regions', 'plot_titles') - iregions = config.getExpression('sst_timeseries', 'regionIndicesToPlot') + plotTitles = config.getExpression('regions', 'plotTitles') + regionIndicesToPlot = config.getExpression('timeSeriesSST', + 'regionIndicesToPlot') # Load data: varList = ['avgSurfaceTemperature'] ds = xr.open_mfdataset( - infiles, - preprocess=lambda x: preprocess_mpas(x, yearoffset=yr_offset, + inFiles, + preprocess=lambda x: preprocess_mpas(x, yearoffset=yearOffset, timestr='Time', onlyvars=varList, varmap=variableMap)) @@ -74,57 +78,63 @@ def sst_timeseries(config, streamMap=None, variableMap=None): # convert the start and end dates to datetime objects using # the Date class, which ensures the results are within the # supported range - time_start = Date(startDate).to_datetime(yr_offset) - time_end = Date(endDate).to_datetime(yr_offset) + timeStart = Date(startDate).to_datetime(yearOffset) + timeEnd = Date(endDate).to_datetime(yearOffset) # select only the data in the specified range of years - ds = ds.sel(Time=slice(time_start, time_end)) + ds = ds.sel(Time=slice(timeStart, timeEnd)) SSTregions = ds.avgSurfaceTemperature - year_start = (pd.to_datetime(ds.Time.min().values)).year - year_end = (pd.to_datetime(ds.Time.max().values)).year - time_start = datetime.datetime(year_start, 1, 1) - time_end = datetime.datetime(year_end, 12, 31) - - if ref_casename_v0 != 'None': - print ' Load in SST for ACMEv0 case...' 
- infiles_v0data = '{}/SST.{}.year*.nc'.format(indir_v0data, - ref_casename_v0) - ds_v0 = xr.open_mfdataset( - infiles_v0data, - preprocess=lambda x: preprocess_mpas(x, yearoffset=yr_offset)) - ds_v0 = remove_repeated_time_index(ds_v0) - year_end_v0 = (pd.to_datetime(ds_v0.Time.max().values)).year - if year_start <= year_end_v0: - ds_v0_tslice = ds_v0.sel(Time=slice(time_start, time_end)) + yearStart = (pd.to_datetime(ds.Time.min().values)).year + yearEnd = (pd.to_datetime(ds.Time.max().values)).year + timeStart = datetime.datetime(yearStart, 1, 1) + timeEnd = datetime.datetime(yearEnd, 12, 31) + + if preprocessedReferenceRunName != 'None': + print ' Load in SST for a preprocessed reference run...' + inFilesPreprocessed = '{}/SST.{}.year*.nc'.format( + preprocessedInputDirectory, preprocessedReferenceRunName) + dsPreprocessed = xr.open_mfdataset( + inFilesPreprocessed, + preprocess=lambda x: preprocess_mpas(x, yearoffset=yearOffset)) + dsPreprocessed = remove_repeated_time_index(dsPreprocessed) + yearEndPreprocessed = \ + (pd.to_datetime(dsPreprocessed.Time.max().values)).year + if yearStart <= yearEndPreprocessed: + dsPreprocessedTimeSlice = \ + dsPreprocessed.sel(Time=slice(timeStart, timeEnd)) else: - print ' Warning: v0 time series lies outside current bounds of v1 time series. Skipping it.' - ref_casename_v0 = 'None' + print ' Warning: Preprocessed time series ends before the ' \ + 'timeSeries startYear and will not be plotted.' + preprocessedReferenceRunName = 'None' print ' Make plots...' - for index in range(len(iregions)): - iregion = iregions[index] - - title = plot_titles[iregion] - title = 'SST, %s, %s (r-)' % (title, casename) - xlabel = 'Time [years]' - ylabel = '[$^\circ$ C]' - - SST = SSTregions[:, iregion] - - if ref_casename_v0 != 'None': - figname = '{}/sst_{}_{}_{}.png'.format(plots_dir, regions[iregion], - casename, ref_casename_v0) - SST_v0 = ds_v0_tslice.SST - - title = '{}\n {} (b-)'.format(title, ref_casename_v0) - timeseries_analysis_plot(config, [SST, SST_v0], N_movavg, - title, xlabel, ylabel, figname, + for index in range(len(regionIndicesToPlot)): + regionIndex = regionIndicesToPlot[index] + + title = plotTitles[regionIndex] + title = 'SST, %s, %s (r-)' % (title, mainRunName) + xLabel = 'Time [years]' + yLabel = '[$^\circ$ C]' + + SST = SSTregions[:, regionIndex] + + if preprocessedReferenceRunName != 'None': + figureName = '{}/sst_{}_{}_{}.png'.format( + plotsDirectory, regions[regionIndex], mainRunName, + preprocessedReferenceRunName) + SST_v0 = dsPreprocessedTimeSlice.SST + + title = '{}\n {} (b-)'.format(title, preprocessedReferenceRunName) + timeseries_analysis_plot(config, [SST, SST_v0], + movingAveragePoints, + title, xLabel, yLabel, figureName, lineStyles=['r-', 'b-'], lineWidths=[1.2, 1.2]) else: - figname = '{}/sst_{}_{}.png'.format(plots_dir, regions[iregion], - casename) - timeseries_analysis_plot(config, [SST], N_movavg, title, xlabel, - ylabel, figname, lineStyles=['r-'], - lineWidths=[1.2]) + figureName = '{}/sst_{}_{}.png'.format(plotsDirectory, + regions[regionIndex], + mainRunName) + timeseries_analysis_plot(config, [SST], movingAveragePoints, title, + xLabel, yLabel, figureName, + lineStyles=['r-'], lineWidths=[1.2]) diff --git a/mpas_analysis/sea_ice/modelvsobs.py b/mpas_analysis/sea_ice/modelvsobs.py index f08740f74..6f6888d88 100644 --- a/mpas_analysis/sea_ice/modelvsobs.py +++ b/mpas_analysis/sea_ice/modelvsobs.py @@ -16,6 +16,7 @@ from ..shared.plot.plotting import plot_polar_comparison from ..shared.io import StreamsFile +from
..shared.io.utility import buildConfigFullPath def seaice_modelvsobs(config, streamMap=None, variableMap=None): @@ -33,62 +34,61 @@ def seaice_modelvsobs(config, streamMap=None, variableMap=None): to their mpas_analysis counterparts. Author: Xylar Asay-Davis, Milena Veneziani - Last Modified: 12/07/2016 + Last Modified: 02/02/2017 """ # read parameters from config file - indir = config.get('paths', 'archive_dir_ocn') + inDirectory = config.get('input', 'baseDirectory') - streams_filename = config.get('input', 'seaice_streams_filename') - streams = StreamsFile(streams_filename, streamsdir=indir) + streamsFileName = config.get('input', 'seaIceStreamsFileName') + streams = StreamsFile(streamsFileName, streamsdir=inDirectory) # get a list of timeSeriesStatsMonthly output files from the streams file, # reading only those that are between the start and end dates - startDate = config.get('time', 'climo_start_date') - endDate = config.get('time', 'climo_end_date') + startDate = config.get('climatology', 'startDate') + endDate = config.get('climatology', 'endDate') streamName = streams.find_stream(streamMap['timeSeriesStats']) infiles = streams.readpath(streamName, startDate=startDate, endDate=endDate) print 'Reading files {} through {}'.format(infiles[0], infiles[-1]) - plots_dir = config.get('paths', 'plots_dir') - obsdir = config.get('paths', 'obs_seaicedir') + plotsDirectory = buildConfigFullPath(config, 'output', 'plotsSubdirectory') + obsDirectory = config.get('seaIceObservations', 'baseDirectory') - casename = config.get('case', 'casename') + mainRunName = config.get('runs', 'mainRunName') - remapfile = config.get('data', 'mpas_remapfile') - climodir = config.get('data', 'mpas_climodir') + mpasRemapFile = config.get('remapping', 'mpasRemapFile') + climatologyDirectory = buildConfigFullPath(config, 'output', + 'climatologySubdirectory') - climo_yr1 = config.getint('time', 'climo_yr1') - climo_yr2 = config.getint('time', 'climo_yr2') - yr_offset = config.getint('time', 'yr_offset') + startYear = config.getint('climatology', 'startYear') + endYear = config.getint('climatology', 'endYear') + yearOffset = config.getint('time', 'yearOffset') - # climodir = "{}/{}".format(climodir, casename) - climodir_regridded = "{}/mpas_regridded".format(climodir) - if not os.path.isdir(climodir): + # climatologyDirectory = "{}/{}".format(climatologyDirectory, mainRunName) + climatologyRegriddedDirectory = "{}/mpas_regridded".format( + climatologyDirectory) + if not os.path.isdir(climatologyDirectory): print "\nClimatology directory does not exist. Create it...\n" - os.mkdir(climodir) - if not os.path.isdir(climodir_regridded): + os.mkdir(climatologyDirectory) + if not os.path.isdir(climatologyRegriddedDirectory): print "\nRegridded directory does not exist. 
Create it...\n" - os.mkdir(climodir_regridded) - - print indir - print climodir - - # Model climo (output) filenames - climofiles = {} - climofiles['winNH'] = "mpas-cice_climo.years{:04d}-{:04d}.jfm.nc".format( - climo_yr1, climo_yr2) - climofiles['sumNH'] = "mpas-cice_climo.years{:04d}-{:04d}.jas.nc".format( - climo_yr1, climo_yr2) - climofiles['winSH'] = "mpas-cice_climo.years{:04d}-{:04d}.djf.nc".format( - climo_yr1, climo_yr2) - climofiles['sumSH'] = "mpas-cice_climo.years{:04d}-{:04d}.jja.nc".format( - climo_yr1, climo_yr2) - climofiles['on'] = "mpas-cice_climo.years{:04d}-{:04d}.on.nc".format( - climo_yr1, climo_yr2) - climofiles['fm'] = "mpas-cice_climo.years{:04d}-{:04d}.fm.nc".format( - climo_yr1, climo_yr2) + os.mkdir(climatologyRegriddedDirectory) + + # Model climatology (output) filenames + climatologyFiles = {} + climatologyFiles['winNH'] = \ + "mpas-cice_climo.years{:04d}-{:04d}.jfm.nc".format(startYear, endYear) + climatologyFiles['sumNH'] = \ + "mpas-cice_climo.years{:04d}-{:04d}.jas.nc".format(startYear, endYear) + climatologyFiles['winSH'] = \ + "mpas-cice_climo.years{:04d}-{:04d}.djf.nc".format(startYear, endYear) + climatologyFiles['sumSH'] = \ + "mpas-cice_climo.years{:04d}-{:04d}.jja.nc".format(startYear, endYear) + climatologyFiles['on'] = \ + "mpas-cice_climo.years{:04d}-{:04d}.on.nc".format(startYear, endYear) + climatologyFiles['fm'] = \ + "mpas-cice_climo.years{:04d}-{:04d}.fm.nc".format(startYear, endYear) # make a dictionary of the months in each climotology monthsInClim = {} @@ -100,58 +100,58 @@ def seaice_modelvsobs(config, streamMap=None, variableMap=None): monthsInClim['fm'] = [2, 3] # Obs filenames - obs_iceconc_filenames = {} - obs_iceconc_filenames['winNH_NASATeam'] = \ + obsIceConcFileNames = {} + obsIceConcFileNames['winNH_NASATeam'] = \ "{}/SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_NH_" \ - "jfm.interp0.5x0.5.nc".format(obsdir) - obs_iceconc_filenames['sumNH_NASATeam'] = \ + "jfm.interp0.5x0.5.nc".format(obsDirectory) + obsIceConcFileNames['sumNH_NASATeam'] = \ "{}/SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_NH_" \ - "jas.interp0.5x0.5.nc".format(obsdir) - obs_iceconc_filenames['winSH_NASATeam'] = \ + "jas.interp0.5x0.5.nc".format(obsDirectory) + obsIceConcFileNames['winSH_NASATeam'] = \ "{}/SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_SH_" \ - "djf.interp0.5x0.5.nc".format(obsdir) - obs_iceconc_filenames['sumSH_NASATeam'] = \ + "djf.interp0.5x0.5.nc".format(obsDirectory) + obsIceConcFileNames['sumSH_NASATeam'] = \ "{}/SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_SH_" \ - "jja.interp0.5x0.5.nc".format(obsdir) - obs_iceconc_filenames['winNH_Bootstrap'] = \ + "jja.interp0.5x0.5.nc".format(obsDirectory) + obsIceConcFileNames['winNH_Bootstrap'] = \ "{}/SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_" \ - "NH_jfm.interp0.5x0.5.nc".format(obsdir) - obs_iceconc_filenames['sumNH_Bootstrap'] = \ + "NH_jfm.interp0.5x0.5.nc".format(obsDirectory) + obsIceConcFileNames['sumNH_Bootstrap'] = \ "{}/SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_" \ - "NH_jas.interp0.5x0.5.nc".format(obsdir) - obs_iceconc_filenames['winSH_Bootstrap'] = \ + "NH_jas.interp0.5x0.5.nc".format(obsDirectory) + obsIceConcFileNames['winSH_Bootstrap'] = \ "{}/SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_" \ - "SH_djf.interp0.5x0.5.nc".format(obsdir) - obs_iceconc_filenames['sumSH_Bootstrap'] = \ + "SH_djf.interp0.5x0.5.nc".format(obsDirectory) + obsIceConcFileNames['sumSH_Bootstrap'] 
= \ "{}/SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_" \ - "SH_jja.interp0.5x0.5.nc".format(obsdir) - obs_icethick_filenames = {} - obs_icethick_filenames['onNH'] = "{}/ICESat/ICESat_gridded_mean_" \ - "thickness_NH_on.interp0.5x0.5.nc".format(obsdir) - obs_icethick_filenames['fmNH'] = "{}/ICESat/ICESat_gridded_mean_" \ - "thickness_NH_fm.interp0.5x0.5.nc".format(obsdir) - obs_icethick_filenames['onSH'] = "{}/ICESat/ICESat_gridded_mean_" \ - "thickness_SH_on.interp0.5x0.5.nc".format(obsdir) - obs_icethick_filenames['fmSH'] = "{}/ICESat/ICESat_gridded_mean_" \ - "thickness_SH_fm.interp0.5x0.5.nc".format(obsdir) + "SH_jja.interp0.5x0.5.nc".format(obsDirectory) + obsIceThickFileNames = {} + obsIceThickFileNames['onNH'] = "{}/ICESat/ICESat_gridded_mean_" \ + "thickness_NH_on.interp0.5x0.5.nc".format(obsDirectory) + obsIceThickFileNames['fmNH'] = "{}/ICESat/ICESat_gridded_mean_" \ + "thickness_NH_fm.interp0.5x0.5.nc".format(obsDirectory) + obsIceThickFileNames['onSH'] = "{}/ICESat/ICESat_gridded_mean_" \ + "thickness_SH_on.interp0.5x0.5.nc".format(obsDirectory) + obsIceThickFileNames['fmSH'] = "{}/ICESat/ICESat_gridded_mean_" \ + "thickness_SH_fm.interp0.5x0.5.nc".format(obsDirectory) # Checks on directory/files existence: - for climName in obs_iceconc_filenames: - obs_filename = obs_iceconc_filenames[climName] - if not os.path.isfile(obs_filename): + for climName in obsIceConcFileNames: + obsFileNames = obsIceConcFileNames[climName] + if not os.path.isfile(obsFileNames): raise SystemExit("Obs file {} not found. Exiting...".format( - obs_filename)) - for climName in obs_icethick_filenames: - obs_filename = obs_icethick_filenames[climName] - if not os.path.isfile(obs_filename): + obsFileNames)) + for climName in obsIceThickFileNames: + obsFileNames = obsIceThickFileNames[climName] + if not os.path.isfile(obsFileNames): raise SystemExit("Obs file {} not found. Exiting...".format( - obs_filename)) + obsFileNames)) # Load data print " Load sea-ice data..." ds = xr.open_mfdataset( infiles, - preprocess=lambda x: preprocess_mpas(x, yearoffset=yr_offset, + preprocess=lambda x: preprocess_mpas(x, yearoffset=yearOffset, timestr='Time', onlyvars=['iceAreaCell', 'iceVolumeCell'], @@ -160,36 +160,37 @@ def seaice_modelvsobs(config, streamMap=None, variableMap=None): # Compute climatologies (first motnhly and then seasonally) print " Compute seasonal climatologies..." - time_start = datetime.datetime(yr_offset+climo_yr1, 1, 1) - time_end = datetime.datetime(yr_offset+climo_yr2, 12, 31) - ds_tslice = ds.sel(Time=slice(time_start, time_end)) + timeStart = datetime.datetime(yearOffset+startYear, 1, 1) + timeEnd = datetime.datetime(yearOffset+endYear, 12, 31) + dsTimeSlice = ds.sel(Time=slice(timeStart, timeEnd)) # check that each year has 24 months (?) 
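+    # As a rough worked example of the day-weighted averaging below: a JFM climatology is built from the monthly climatologies as +    #     (31*Jan + 28*Feb + 31*Mar) / (31 + 28 + 31) +    # with the weights taken from daysInMonth.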
- monthly_clim = ds_tslice.groupby('Time.month').mean('Time') + monthlyClimatology = dsTimeSlice.groupby('Time.month').mean('Time') daysInMonth = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] monthLetters = ['J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'] - clims = {} + climatologies = {} for climName in monthsInClim: months = monthsInClim[climName] month = months[0] days = daysInMonth[month-1] - climatology = days*monthly_clim.sel(month=month) + climatology = days*monthlyClimatology.sel(month=month) totalDays = days for month in months[1:]: days = daysInMonth[month-1] - climatology += days*monthly_clim.sel(month=month) + climatology += days*monthlyClimatology.sel(month=month) totalDays += days climatology /= totalDays - clims[climName] = climatology + climatologies[climName] = climatology print " Regrid fields to regular grid..." - for climName in clims: + for climName in climatologies: # Save to netcdf files - outFileName = "{}/{}".format(climodir, climofiles[climName]) - clims[climName].to_netcdf(outFileName) - args = ["ncremap", "-P", "mpas", "-i", outFileName, "-m", remapfile, - "-O", climodir_regridded] + outFileName = "{}/{}".format(climatologyDirectory, + climatologyFiles[climName]) + climatologies[climName].to_netcdf(outFileName) + args = ["ncremap", "-P", "mpas", "-i", outFileName, + "-m", mpasRemapFile, "-O", climatologyRegriddedDirectory] try: subprocess.check_call(args) except subprocess.CalledProcessError, e: @@ -198,109 +199,120 @@ def seaice_modelvsobs(config, streamMap=None, variableMap=None): raise e print " Make ice concentration plots..." - suptitle = "Ice concentration" + subtitle = "Ice concentration" # interate over observations of sea-ice concentration first = True - for climName in ['winNH', 'winSH', 'sumNH', 'sumSH']: - hemisphere = climName[-2:] - season = climName[:-2] - - if hemisphere == 'NH': - plotProjection = 'npstere' - else: - plotProjection = 'spstere' - - clevsModelObs = config.getExpression('seaice_modelvsobs', - 'clevsModelObs_conc_{}'.format( - season)) - cmap = plt.get_cmap(config.get('seaice_modelvsobs', 'cmapModelObs')) - cmapIndices = config.getExpression('seaice_modelvsobs', - 'cmapIndicesModelObs') - cmapModelObs = cols.ListedColormap(cmap(cmapIndices), "cmapModelObs") - - clevsDiff = config.getExpression('seaice_modelvsobs', - 'clevsDiff_conc_{}'.format(season)) - cmap = plt.get_cmap(config.get('seaice_modelvsobs', 'cmapDiff')) - cmapIndices = config.getExpression('seaice_modelvsobs', - 'cmapIndicesDiff') - cmapDiff = cols.ListedColormap(cmap(cmapIndices), "cmapDiff") - - lon0 = config.getfloat('seaice_modelvsobs', - 'lon0_{}'.format(hemisphere)) - latmin = config.getfloat('seaice_modelvsobs', - 'latmin_{}'.format(hemisphere)) + for season in ['Winter', 'Summer']: + for hemisphere in ['NH', 'SH']: + climName = '{}{}'.format(season[0:3].lower(), hemisphere) - # Load in sea-ice data - # Model...
- # ice concentrations - fileName = "{}/{}".format(climodir_regridded, climofiles[climName]) - f = netcdf_dataset(fileName, mode='r') - iceconc = f.variables["iceAreaCell"][:] - if(first): - lons = f.variables["lon"][:] - lats = f.variables["lat"][:] - print "Min lon: ", np.amin(lons), "Max lon: ", np.amax(lons) - print "Min lat: ", np.amin(lats), "Max lat: ", np.amax(lats) - Lons, Lats = np.meshgrid(lons, lats) - first = False - f.close() - - # ...and observations - # ice concentrations from NASATeam (or Bootstrap) algorithm - for obsName in ['NASATeam', 'Bootstrap']: - - fileName = obs_iceconc_filenames['{}_{}'.format(climName, obsName)] - f = netcdf_dataset(fileName, mode='r') - obs_iceconc = f.variables["AICE"][:] - f.close() - - diff = iceconc - obs_iceconc - - monthsName = [] - for month in monthsInClim[climName]: - monthsName.append(monthLetters[month-1]) - monthsName = ''.join(monthsName) - - title = "{} ({}, years {:04d}-{:04d})".format( - suptitle, monthsName, climo_yr1, climo_yr2) - fileout = "{}/iceconc{}{}_{}_{}_years{:04d}-{:04d}.png".format( - plots_dir, obsName, hemisphere, casename, monthsName, - climo_yr1, climo_yr2) - plot_polar_comparison( - config, - Lons, - Lats, - iceconc, - obs_iceconc, - diff, - cmapModelObs, - clevsModelObs, - cmapDiff, - clevsDiff, - title=title, - fileout=fileout, - plotProjection=plotProjection, - latmin=latmin, - lon0=lon0, - modelTitle=casename, - obsTitle="Observations (SSM/I {})".format(obsName), - diffTitle="Model-Observations", - cbarlabel="fraction") + if hemisphere == 'NH': + plotProjection = 'npstere' + else: + plotProjection = 'spstere' + + resultConcContourValues = config.getExpression( + 'regriddedSeaIceConcThick', + 'resultConc{}ContourValues'.format(season)) + resultColormap = plt.get_cmap(config.get( + 'regriddedSeaIceConcThick', 'resultColormap')) + resultColormapIndices = config.getExpression( + 'regriddedSeaIceConcThick', 'resultColormapIndices') + resultColormap = cols.ListedColormap( + resultColormap(resultColormapIndices), "resultColormap") + + differenceConcContourValues = config.getExpression( + 'regriddedSeaIceConcThick', + 'differenceConc{}ContourValues'.format(season)) + differenceColormap = plt.get_cmap(config.get( + 'regriddedSeaIceConcThick', 'differenceColormap')) + differenceColormapIndices = config.getExpression( + 'regriddedSeaIceConcThick', 'differenceColormapIndices') + differenceColormap = cols.ListedColormap( + differenceColormap(differenceColormapIndices), + "differenceColormap") + + referenceLongitude = config.getfloat( + 'regriddedSeaIceConcThick', + 'referenceLongitude{}'.format(hemisphere)) + minimumLatitude = config.getfloat( + 'regriddedSeaIceConcThick', + 'minimumLatitude{}'.format(hemisphere)) + + # Load in sea-ice data + # Model... 
+ # ice concentrations + fileName = "{}/{}".format(climatologyRegriddedDirectory, + climatologyFiles[climName]) + ncFile = netcdf_dataset(fileName, mode='r') + iceConcentration = ncFile.variables["iceAreaCell"][:] + if(first): + lons = ncFile.variables["lon"][:] + lats = ncFile.variables["lat"][:] + print "Min lon: ", np.amin(lons), "Max lon: ", np.amax(lons) + print "Min lat: ", np.amin(lats), "Max lat: ", np.amax(lats) + Lons, Lats = np.meshgrid(lons, lats) + first = False + ncFile.close() + + # ...and observations + # ice concentrations from NASATeam (or Bootstrap) algorithm + for obsName in ['NASATeam', 'Bootstrap']: + + fileName = obsIceConcFileNames[ + '{}_{}'.format(climName, obsName)] + ncFile = netcdf_dataset(fileName, mode='r') + obsIceConcentration = ncFile.variables["AICE"][:] + ncFile.close() + + difference = iceConcentration - obsIceConcentration + + monthsName = [] + for month in monthsInClim[climName]: + monthsName.append(monthLetters[month-1]) + monthsName = ''.join(monthsName) + + title = "{} ({}, years {:04d}-{:04d})".format( + subtitle, monthsName, startYear, endYear) + fileout = "{}/iceconc{}{}_{}_{}_years{:04d}-{:04d}.png".format( + plotsDirectory, obsName, hemisphere, mainRunName, + monthsName, startYear, endYear) + plot_polar_comparison( + config, + Lons, + Lats, + iceConcentration, + obsIceConcentration, + difference, + resultColormap, + resultConcContourValues, + differenceColormap, + differenceConcContourValues, + title=title, + fileout=fileout, + plotProjection=plotProjection, + latmin=minimumLatitude, + lon0=referenceLongitude, + modelTitle=mainRunName, + obsTitle="Observations (SSM/I {})".format(obsName), + diffTitle="Model-Observations", + cbarlabel="fraction") print " Make ice thickness plots..." # Plot Northern Hemisphere FM sea-ice thickness - suptitle = "Ice thickness" + subtitle = "Ice thickness" # interate over observations of sea-ice thickness for climName in ['fm', 'on']: # Load in sea-ice data # Model... 
# ice concentrations - fileName = "{}/{}".format(climodir_regridded, climofiles[climName]) - f = netcdf_dataset(fileName, mode='r') - icethick = f.variables["iceVolumeCell"][:] - f.close() + fileName = "{}/{}".format(climatologyRegriddedDirectory, + climatologyFiles[climName]) + ncFile = netcdf_dataset(fileName, mode='r') + iceThickness = ncFile.variables["iceVolumeCell"][:] + ncFile.close() monthsName = [] for month in monthsInClim[climName]: @@ -311,68 +323,73 @@ def seaice_modelvsobs(config, streamMap=None, variableMap=None): # ...and observations # ice concentrations from NASATeam (or Bootstrap) algorithm - clevsModelObs = config.getExpression( - 'seaice_modelvsobs', - 'clevsModelObs_thick_{}'.format(hemisphere)) - cmap = plt.get_cmap(config.get('seaice_modelvsobs', - 'cmapModelObs')) - cmapIndices = config.getExpression('seaice_modelvsobs', - 'cmapIndicesModelObs') - cmapModelObs = cols.ListedColormap(cmap(cmapIndices), - "cmapModelObs") - - clevsDiff = config.getExpression( - 'seaice_modelvsobs', - 'clevsDiff_thick_{}'.format(hemisphere)) - cmap = plt.get_cmap(config.get('seaice_modelvsobs', 'cmapDiff')) - cmapIndices = config.getExpression('seaice_modelvsobs', - 'cmapIndicesDiff') - cmapDiff = cols.ListedColormap(cmap(cmapIndices), "cmapDiff") - - lon0 = config.getfloat('seaice_modelvsobs', - 'lon0_{}'.format(hemisphere)) - latmin = config.getfloat('seaice_modelvsobs', - 'latmin_{}'.format(hemisphere)) - - fileName = obs_icethick_filenames['{}{}'.format(climName, - hemisphere)] - f = netcdf_dataset(fileName, mode='r') - obs_icethick = f.variables["HI"][:] - f.close() + resultThickContourValues = config.getExpression( + 'regriddedSeaIceConcThick', + 'resultThick{}ContourValues'.format(hemisphere)) + resultColormap = plt.get_cmap(config.get( + 'regriddedSeaIceConcThick', 'resultColormap')) + resultColormapIndices = config.getExpression( + 'regriddedSeaIceConcThick', 'resultColormapIndices') + resultColormap = cols.ListedColormap( + resultColormap(resultColormapIndices), "resultColormap") + + differenceThickContourValues = config.getExpression( + 'regriddedSeaIceConcThick', + 'differenceThick{}ContourValues'.format(hemisphere)) + differenceColormap = plt.get_cmap(config.get( + 'regriddedSeaIceConcThick', 'differenceColormap')) + differenceColormapIndices = config.getExpression( + 'regriddedSeaIceConcThick', 'differenceColormapIndices') + differenceColormap = cols.ListedColormap( + differenceColormap(differenceColormapIndices), + "differenceColormap") + + referenceLongitude = config.getfloat( + 'regriddedSeaIceConcThick', + 'referenceLongitude{}'.format(hemisphere)) + minimumLatitude = config.getfloat( + 'regriddedSeaIceConcThick', + 'minimumLatitude{}'.format(hemisphere)) + + fileName = obsIceThickFileNames['{}{}'.format(climName, + hemisphere)] + ncFile = netcdf_dataset(fileName, mode='r') + obsIceThickness = ncFile.variables["HI"][:] + ncFile.close() # Mask thickness fields - icethick[icethick == 0] = ma.masked - obs_icethick = ma.masked_values(obs_icethick, 0) + iceThickness[iceThickness == 0] = ma.masked + obsIceThickness = ma.masked_values(obsIceThickness, 0) if hemisphere == 'NH': # Obs thickness should be nan above 86 (ICESat data) - obs_icethick[Lats > 86] = ma.masked + obsIceThickness[Lats > 86] = ma.masked plotProjection = 'npstere' else: plotProjection = 'spstere' - diff = icethick - obs_icethick + difference = iceThickness - obsIceThickness - title = "{} ({}, years {:04d}-{:04d})".format(suptitle, monthsName, - climo_yr1, climo_yr2) + title = "{} ({}, years 
{:04d}-{:04d})".format(subtitle, monthsName, + startYear, endYear) fileout = "{}/icethick{}_{}_{}_years{:04d}-{:04d}.png".format( - plots_dir, hemisphere, casename, monthsName, climo_yr1, - climo_yr2) + plotsDirectory, hemisphere, mainRunName, monthsName, startYear, + endYear) plot_polar_comparison( config, Lons, Lats, - icethick, - obs_icethick, - diff, - cmapModelObs, - clevsModelObs, - cmapDiff, - clevsDiff, + iceThickness, + obsIceThickness, + difference, + resultColormap, + resultThickContourValues, + differenceColormap, + differenceThickContourValues, title=title, fileout=fileout, plotProjection=plotProjection, - latmin=latmin, - lon0=lon0, - modelTitle=casename, + latmin=minimumLatitude, + lon0=referenceLongitude, + modelTitle=mainRunName, obsTitle="Observations (ICESat)", diffTitle="Model-Observations", cbarlabel="m") diff --git a/mpas_analysis/sea_ice/timeseries.py b/mpas_analysis/sea_ice/timeseries.py index b9e77e6e6..dbce1a860 100644 --- a/mpas_analysis/sea_ice/timeseries.py +++ b/mpas_analysis/sea_ice/timeseries.py @@ -9,6 +9,7 @@ from ..shared.plot.plotting import timeseries_analysis_plot from ..shared.io import StreamsFile +from ..shared.io.utility import buildConfigFullPath from ..shared.timekeeping.Date import Date @@ -27,66 +28,71 @@ def seaice_timeseries(config, streamMap=None, variableMap=None): to their mpas_analysis counterparts. Author: Xylar Asay-Davis, Milena Veneziani - Last Modified: 12/07/2016 + Last Modified: 02/02/2017 """ # read parameters from config file - indir = config.get('paths', 'archive_dir_ocn') + inDirectory = config.get('input', 'baseDirectory') - - streams_filename = config.get('input', 'seaice_streams_filename') - streams = StreamsFile(streams_filename, streamsdir=indir) + streamsFileName = config.get('input', 'seaIceStreamsFileName') + streams = StreamsFile(streamsFileName, streamsdir=inDirectory) # get a list of timeSeriesStatsMonthly output files from the streams file, # reading only those that are between the start and end dates - startDate = config.get('time', 'timeseries_start_date') - endDate = config.get('time', 'timeseries_end_date') + startDate = config.get('timeSeries', 'startDate') + endDate = config.get('timeSeries', 'endDate') streamName = streams.find_stream(streamMap['timeSeriesStats']) - infiles = streams.readpath(streamName, startDate=startDate, + inFiles = streams.readpath(streamName, startDate=startDate, endDate=endDate) - print 'Reading files {} through {}'.format(infiles[0], infiles[-1]) + print 'Reading files {} through {}'.format(inFiles[0], inFiles[-1]) - varnames = ['iceAreaCell', 'iceVolumeCell'] + variableNames = ['iceAreaCell', 'iceVolumeCell'] - plot_titles = {'iceAreaCell': 'Sea-ice area', - 'iceVolumeCell': 'Sea-ice volume', - 'iceThickness': 'Sea-ice thickness'} + plotTitles = {'iceAreaCell': 'Sea-ice area', + 'iceVolumeCell': 'Sea-ice volume', + 'iceThickness': 'Sea-ice thickness'} - units_dict = {'iceAreaCell': '[km$^2$]', - 'iceVolumeCell': '[10$^3$ km$^3$]', - 'iceThickness': '[m]'} + unitsDictionary = {'iceAreaCell': '[km$^2$]', + 'iceVolumeCell': '[10$^3$ km$^3$]', + 'iceThickness': '[m]'} - obs_filenames = { - 'iceAreaCell': [config.get('seaIceData', 'obs_iceareaNH'), - config.get('seaIceData', 'obs_iceareaSH')], - 'iceVolumeCell': [config.get('seaIceData', 'obs_icevolNH'), - config.get('seaIceData', 'obs_icevolSH')]} + obsFileNames = { + 'iceAreaCell': [buildConfigFullPath(config, 'seaIceObservations', + subdir) + for subdir in ['areaNH', 'areaSH']], + 'iceVolumeCell': [buildConfigFullPath(config, 
'seaIceObservations', + subdir) + for subdir in ['volNH', 'volSH']]} # Some plotting rules - title_font_size = config.get('seaice_timeseries', 'title_font_size') + titleFontSize = config.get('timeSeriesSeaIceAreaVol', 'titleFontSize') - casename = config.get('case', 'casename') - ref_casename_v0 = config.get('case', 'ref_casename_v0') - indir_v0data = config.get('paths', 'ref_archive_v0_seaicedir') + mainRunName = config.get('runs', 'mainRunName') + preprocessedReferenceRunName = config.get('runs', + 'preprocessedReferenceRunName') + preprocessedReferenceDirectory = config.get('seaIcePreprocessedReference', + 'baseDirectory') - compare_with_obs = config.getboolean('seaice_timeseries', - 'compare_with_obs') + compareWithObservations = config.getboolean('timeSeriesSeaIceAreaVol', + 'compareWithObservations') - plots_dir = config.get('paths', 'plots_dir') + plotsDirectory = buildConfigFullPath(config, 'output', 'plotsSubdirectory') - yr_offset = config.getint('time', 'yr_offset') + yearOffset = config.getint('time', 'yearOffset') - N_movavg = config.getint('seaice_timeseries', 'N_movavg') + movingAveragePoints = config.getint('timeSeriesSeaIceAreaVol', + 'movingAveragePoints') # first, check for a sea-ice restart file try: - inputfile = streams.readpath('restart')[0] + restartFile = streams.readpath('restart')[0] except ValueError: # get an ocean restart file, since no sea-ice restart exists - ocean_streams_filename = config.get('input', 'ocean_streams_filename') - ocean_streams = StreamsFile(ocean_streams_filename, streamsdir=indir) + oceanStreamsFileName = config.get('input', 'oceanStreamsFileName') + oceanStreams = StreamsFile(oceanStreamsFileName, + streamsdir=inDirectory) try: - inputfile = ocean_streams.readpath('restart')[0] + restartFile = oceanStreams.readpath('restart')[0] except ValueError: raise IOError('No MPAS-O or MPAS-Seaice restart file found: need ' 'at least one restart file for seaice_timeseries ' @@ -94,13 +100,13 @@ def seaice_timeseries(config, streamMap=None, variableMap=None): print ' Load sea-ice data...' 
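+    # For reference, buildConfigFullPath (added in shared/io/utility.py in this change) simply joins a section's baseDirectory with the named relative-path option, roughly +    #     '{}/{}'.format(config.get(section, 'baseDirectory'), +    #                    config.get(section, relativePathOption)) +    # so the observation and plot paths above are resolved relative to their sections' baseDirectory options.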
# Load mesh - dsmesh = xr.open_dataset(inputfile) - dsmesh = subset_variables(dsmesh, vlist=['lonCell', 'latCell', 'areaCell']) + dsMesh = xr.open_dataset(restartFile) + dsMesh = subset_variables(dsMesh, vlist=['lonCell', 'latCell', 'areaCell']) # Load data ds = xr.open_mfdataset( - infiles, - preprocess=lambda x: preprocess_mpas(x, yearoffset=yr_offset, + inFiles, + preprocess=lambda x: preprocess_mpas(x, yearoffset=yearOffset, timestr='Time', onlyvars=['iceAreaCell', 'iceVolumeCell'], @@ -110,232 +116,254 @@ def seaice_timeseries(config, streamMap=None, variableMap=None): # convert the start and end dates to datetime objects using # the Date class, which ensures the results are within the # supported range - time_start = Date(startDate).to_datetime(yr_offset) - time_end = Date(endDate).to_datetime(yr_offset) + timeStart = Date(startDate).to_datetime(yearOffset) + timeEnd = Date(endDate).to_datetime(yearOffset) # select only the data in the specified range of years - ds = ds.sel(Time=slice(time_start, time_end)) + ds = ds.sel(Time=slice(timeStart, timeEnd)) # handle the case where the "mesh" file has a spurious time dimension - if 'Time' in dsmesh.keys(): - dsmesh = dsmesh.drop('Time') - ds = ds.merge(dsmesh) - - year_start = (pd.to_datetime(ds.Time.min().values)).year - year_end = (pd.to_datetime(ds.Time.max().values)).year - time_start = datetime.datetime(year_start, 1, 1) - time_end = datetime.datetime(year_end, 12, 31) - - if ref_casename_v0 != 'None': - infiles_v0data = '{}/icevol.{}.year*.nc'.format(indir_v0data, - ref_casename_v0) - ds_v0 = xr.open_mfdataset( - infiles_v0data, - preprocess=lambda x: preprocess_mpas(x, yearoffset=yr_offset)) - year_end_v0 = (pd.to_datetime(ds_v0.Time.max().values)).year - if year_start <= year_end_v0: - ds_v0_tslice = ds_v0.sel(Time=slice(time_start, time_end)) + if 'Time' in dsMesh.keys(): + dsMesh = dsMesh.drop('Time') + ds = ds.merge(dsMesh) + + yearStart = (pd.to_datetime(ds.Time.min().values)).year + yearEnd = (pd.to_datetime(ds.Time.max().values)).year + timeStart = datetime.datetime(yearStart, 1, 1) + timeEnd = datetime.datetime(yearEnd, 12, 31) + + if preprocessedReferenceRunName != 'None': + inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format( + preprocessedReferenceDirectory, preprocessedReferenceRunName) + dsPreprocessed = xr.open_mfdataset( + inFilesPreprocessed, + preprocess=lambda x: preprocess_mpas(x, yearoffset=yearOffset)) + preprocessedYearEnd = (pd.to_datetime( + dsPreprocessed.Time.max().values)).year + if yearStart <= preprocessedYearEnd: + dsPreprocessedTimeSlice = dsPreprocessed.sel(Time=slice(timeStart, + timeEnd)) else: - print ' Warning: v0 time series lies outside current bounds of v1 time series. Skipping it.' - ref_casename_v0 = 'None' + print ' Warning: Preprocessed time series ends before the ' \ + 'timeSeries startYear and will not be plotted.' 
+ preprocessedReferenceRunName = 'None' # Make Northern and Southern Hemisphere partition: areaCell = ds.areaCell - ind_nh = ds.latCell > 0 - ind_sh = ds.latCell < 0 - areaCell_nh = areaCell.where(ind_nh) - areaCell_sh = areaCell.where(ind_sh) - - for varname in varnames: - obs_filenameNH = obs_filenames[varname][0] - obs_filenameSH = obs_filenames[varname][1] - plot_title = plot_titles[varname] - units = units_dict[varname] - - print ' Compute NH and SH time series of {}...'.format(varname) - if varname == 'iceThickCell': - varnamefull = 'iceVolumeCell' + maskNH = ds.latCell > 0 + maskSH = ds.latCell < 0 + areaCellNH = areaCell.where(maskNH) + areaCellSH = areaCell.where(maskSH) + + for variableName in variableNames: + obsFileNameNH = obsFileNames[variableName][0] + obsFileNameSH = obsFileNames[variableName][1] + plotTitle = plotTitles[variableName] + units = unitsDictionary[variableName] + + print ' Compute NH and SH time series of {}...'.format(variableName) + if variableName == 'iceThickCell': + variableNamefull = 'iceVolumeCell' else: - varnamefull = varname - var = ds[varnamefull] - - var_nh = var.where(ind_nh)*areaCell_nh - var_sh = var.where(ind_sh)*areaCell_sh - - ind_iceext = var > 0.15 - var_nh_iceext = var_nh.where(ind_iceext) - var_sh_iceext = var_sh.where(ind_iceext) - - if varname == 'iceAreaCell': - var_nh = var_nh.sum('nCells') - var_sh = var_sh.sum('nCells') - var_nh = 1e-6*var_nh # m^2 to km^2 - var_sh = 1e-6*var_sh # m^2 to km^2 - var_nh_iceext = 1e-6*var_nh_iceext.sum('nCells') - var_sh_iceext = 1e-6*var_sh_iceext.sum('nCells') - elif varname == 'iceVolumeCell': - var_nh = var_nh.sum('nCells') - var_sh = var_sh.sum('nCells') - var_nh = 1e-3*1e-9*var_nh # m^3 to 10^3 km^3 - var_sh = 1e-3*1e-9*var_sh # m^3 to 10^3 km^3 + variableNamefull = variableName + var = ds[variableNamefull] + + varNH = var.where(maskNH)*areaCellNH + varSH = var.where(maskSH)*areaCellSH + + maskIceExtent = var > 0.15 + varNHIceExtent = varNH.where(maskIceExtent) + varSHIceExtent = varSH.where(maskIceExtent) + + if variableName == 'iceAreaCell': + varNH = varNH.sum('nCells') + varSH = varSH.sum('nCells') + varNH = 1e-6*varNH # m^2 to km^2 + varSH = 1e-6*varSH # m^2 to km^2 + varNHIceExtent = 1e-6*varNHIceExtent.sum('nCells') + varSHIceExtent = 1e-6*varSHIceExtent.sum('nCells') + elif variableName == 'iceVolumeCell': + varNH = varNH.sum('nCells') + varSH = varSH.sum('nCells') + varNH = 1e-3*1e-9*varNH # m^3 to 10^3 km^3 + varSH = 1e-3*1e-9*varSH # m^3 to 10^3 km^3 else: - var_nh = var_nh.mean('nCells')/areaCell_nh.mean('nCells') - var_sh = var_sh.mean('nCells')/areaCell_sh.mean('nCells') + varNH = varNH.mean('nCells')/areaCellNH.mean('nCells') + varSH = varSH.mean('nCells')/areaCellSH.mean('nCells') print ' Make plots...' 
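+        # For reference, the scale factors above are plain unit conversions: +        # 1 m^2 = 1e-6 km^2 for the summed ice area, and +        # 1 m^3 = 1e-9 km^3 = (1e-3*1e-9) x 10^3 km^3 for the summed ice volume.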
- xlabel = 'Time [years]' + xLabel = 'Time [years]' - if ref_casename_v0 != 'None': - figname_nh = '{}/{}NH_{}_{}.png'.format(plots_dir, varname, - casename, ref_casename_v0) - figname_sh = '{}/{}SH_{}_{}.png'.format(plots_dir, varname, - casename, ref_casename_v0) + if preprocessedReferenceRunName != 'None': + figureNameNH = '{}/{}NH_{}_{}.png'.format( + plotsDirectory, variableName, mainRunName, + preprocessedReferenceRunName) + figureNameSH = '{}/{}SH_{}_{}.png'.format( + plotsDirectory, variableName, mainRunName, + preprocessedReferenceRunName) else: - figname_nh = '{}/{}NH_{}.png'.format(plots_dir, varname, casename) - figname_sh = '{}/{}SH_{}.png'.format(plots_dir, varname, casename) - - title_nh = '{} (NH), {} (r)'.format(plot_title, casename) - title_sh = '{} (SH), {} (r)'.format(plot_title, casename) - - if compare_with_obs: - if varname == 'iceAreaCell': - title_nh = \ - '{}\nSSM/I observations, annual cycle (k)'.format(title_nh) - title_sh = \ - '{}\nSSM/I observations, annual cycle (k)'.format(title_sh) - elif varname == 'iceVolumeCell': - title_nh = '{}\nPIOMAS, annual cycle (k)'.format(title_nh) - title_sh = '{}\n'.format(title_sh) - - if ref_casename_v0 != 'None': - title_nh = '{}\n {} (b)'.format(title_nh, ref_casename_v0) - title_sh = '{}\n {} (b)'.format(title_sh, ref_casename_v0) - - if varname == 'iceAreaCell': - - if compare_with_obs: - ds_obs = xr.open_mfdataset( - obs_filenameNH, - preprocess=lambda x: preprocess_mpas(x, - yearoffset=yr_offset)) - ds_obs = remove_repeated_time_index(ds_obs) - var_nh_obs = ds_obs.IceArea - var_nh_obs = replicate_cycle(var_nh, var_nh_obs) - - ds_obs = xr.open_mfdataset( - obs_filenameSH, - preprocess=lambda x: preprocess_mpas(x, - yearoffset=yr_offset)) - ds_obs = remove_repeated_time_index(ds_obs) - var_sh_obs = ds_obs.IceArea - var_sh_obs = replicate_cycle(var_sh, var_sh_obs) - - if ref_casename_v0 != 'None': - infiles_v0data = '{}/icearea.{}.year*.nc'.format( - indir_v0data, ref_casename_v0) - ds_v0 = xr.open_mfdataset( - infiles_v0data, - preprocess=lambda x: preprocess_mpas(x, - yearoffset=yr_offset)) - ds_v0_tslice = ds_v0.sel(Time=slice(time_start, time_end)) - var_nh_v0 = ds_v0_tslice.icearea_nh - var_sh_v0 = ds_v0_tslice.icearea_sh - - elif varname == 'iceVolumeCell': - - if compare_with_obs: - ds_obs = xr.open_mfdataset( - obs_filenameNH, - preprocess=lambda x: preprocess_mpas(x, - yearoffset=yr_offset)) - ds_obs = remove_repeated_time_index(ds_obs) - var_nh_obs = ds_obs.IceVol - var_nh_obs = replicate_cycle(var_nh, var_nh_obs) - - var_sh_obs = None - - if ref_casename_v0 != 'None': - infiles_v0data = '{}/icevol.{}.year*.nc'.format( - indir_v0data, ref_casename_v0) - ds_v0 = xr.open_mfdataset( - infiles_v0data, - preprocess=lambda x: preprocess_mpas(x, - yearoffset=yr_offset)) - ds_v0_tslice = ds_v0.sel(Time=slice(time_start, time_end)) - var_nh_v0 = ds_v0_tslice.icevolume_nh - var_sh_v0 = ds_v0_tslice.icevolume_sh - - if varname in ['iceAreaCell', 'iceVolumeCell']: - if compare_with_obs: - if ref_casename_v0 != 'None': - vars_nh = [var_nh, var_nh_obs, var_nh_v0] - vars_sh = [var_sh, var_sh_obs, var_sh_v0] + figureNameNH = '{}/{}NH_{}.png'.format(plotsDirectory, + variableName, + mainRunName) + figureNameSH = '{}/{}SH_{}.png'.format(plotsDirectory, + variableName, + mainRunName) + + titleNH = '{} (NH), {} (r)'.format(plotTitle, mainRunName) + titleSH = '{} (SH), {} (r)'.format(plotTitle, mainRunName) + + if compareWithObservations: + if variableName == 'iceAreaCell': + titleNH = \ + '{}\nSSM/I observations, annual cycle 
(k)'.format(titleNH) + titleSH = \ + '{}\nSSM/I observations, annual cycle (k)'.format(titleSH) + elif variableName == 'iceVolumeCell': + titleNH = '{}\nPIOMAS, annual cycle (k)'.format(titleNH) + titleSH = '{}\n'.format(titleSH) + + if preprocessedReferenceRunName != 'None': + titleNH = '{}\n {} (b)'.format(titleNH, + preprocessedReferenceRunName) + titleSH = '{}\n {} (b)'.format(titleSH, + preprocessedReferenceRunName) + + if variableName == 'iceAreaCell': + + if compareWithObservations: + dsObs = xr.open_mfdataset( + obsFileNameNH, + preprocess=lambda x: preprocess_mpas( + x, yearoffset=yearOffset)) + dsObs = remove_repeated_time_index(dsObs) + varNHObs = dsObs.IceArea + varNHObs = replicate_cycle(varNH, varNHObs) + + dsObs = xr.open_mfdataset( + obsFileNameSH, + preprocess=lambda x: preprocess_mpas( + x, yearoffset=yearOffset)) + dsObs = remove_repeated_time_index(dsObs) + varSHObs = dsObs.IceArea + varSHObs = replicate_cycle(varSH, varSHObs) + + if preprocessedReferenceRunName != 'None': + inFilesPreprocessed = '{}/icearea.{}.year*.nc'.format( + preprocessedReferenceDirectory, + preprocessedReferenceRunName) + dsPreprocessed = xr.open_mfdataset( + inFilesPreprocessed, + preprocess=lambda x: preprocess_mpas( + x, yearoffset=yearOffset)) + dsPreprocessedTimeSlice = dsPreprocessed.sel( + Time=slice(timeStart, timeEnd)) + varNHPreprocessed = dsPreprocessedTimeSlice.icearea_nh + varSHPreprocessed = dsPreprocessedTimeSlice.icearea_sh + + elif variableName == 'iceVolumeCell': + + if compareWithObservations: + dsObs = xr.open_mfdataset( + obsFileNameNH, + preprocess=lambda x: preprocess_mpas( + x, yearoffset=yearOffset)) + dsObs = remove_repeated_time_index(dsObs) + varNHObs = dsObs.IceVol + varNHObs = replicate_cycle(varNH, varNHObs) + + varSHObs = None + + if preprocessedReferenceRunName != 'None': + inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format( + preprocessedReferenceDirectory, + preprocessedReferenceRunName) + dsPreprocessed = xr.open_mfdataset( + inFilesPreprocessed, + preprocess=lambda x: preprocess_mpas( + x, yearoffset=yearOffset)) + dsPreprocessedTimeSlice = dsPreprocessed.sel( + Time=slice(timeStart, timeEnd)) + varNHPreprocessed = dsPreprocessedTimeSlice.icevolume_nh + varSHPreprocessed = dsPreprocessedTimeSlice.icevolume_sh + + if variableName in ['iceAreaCell', 'iceVolumeCell']: + if compareWithObservations: + if preprocessedReferenceRunName != 'None': + varsNH = [varNH, varNHObs, varNHPreprocessed] + varsSH = [varSH, varSHObs, varSHPreprocessed] lineStyles = ['r-', 'k-', 'b-'] lineWidths = [1.2, 1.2, 1.2] else: # just v1 model and obs - vars_nh = [var_nh, var_nh_obs] - vars_sh = [var_sh, var_sh_obs] + varsNH = [varNH, varNHObs] + varsSH = [varSH, varSHObs] lineStyles = ['r-', 'k-'] lineWidths = [1.2, 1.2] - elif ref_casename_v0 != 'None': + elif preprocessedReferenceRunName != 'None': # just v1 and v0 models - vars_nh = [var_nh, var_nh_v0] - vars_sh = [var_sh, var_sh_v0] + varsNH = [varNH, varNHPreprocessed] + varsSH = [varSH, varSHPreprocessed] lineStyles = ['r-', 'b-'] lineWidths = [1.2, 1.2] - if compare_with_obs or ref_casename_v0 != 'None': + if (compareWithObservations or + preprocessedReferenceRunName != 'None'): # separate plots for nothern and southern hemispheres - timeseries_analysis_plot(config, vars_nh, N_movavg, title_nh, - xlabel, units, figname_nh, + timeseries_analysis_plot(config, varsNH, movingAveragePoints, + titleNH, + xLabel, units, figureNameNH, lineStyles=lineStyles, lineWidths=lineWidths, - title_font_size=title_font_size) - 
timeseries_analysis_plot(config, vars_sh, N_movavg, title_sh, - xlabel, units, figname_sh, + titleFontSize=titleFontSize) + timeseries_analysis_plot(config, varsSH, movingAveragePoints, + titleSH, + xLabel, units, figureNameSH, lineStyles=lineStyles, lineWidths=lineWidths, - title_font_size=title_font_size) + titleFontSize=titleFontSize) else: # we will combine north and south onto a single graph - figname = '{}/{}.{}.png'.format(plots_dir, casename, varname) - title = '{}, NH (r), SH (k)\n{}'.format(plot_title, casename) - timeseries_analysis_plot(config, [var_nh, var_sh], N_movavg, - title, xlabel, units, figname, + figureName = '{}/{}.{}.png'.format(plotsDirectory, mainRunName, + variableName) + title = '{}, NH (r), SH (k)\n{}'.format(plotTitle, mainRunName) + timeseries_analysis_plot(config, [varNH, varSH], + movingAveragePoints, + title, xLabel, units, figureName, lineStyles=['r-', 'k-'], lineWidths=[1.2, 1.2], - title_font_size=title_font_size) + titleFontSize=titleFontSize) - elif varname == 'iceThickCell': + elif variableName == 'iceThickCell': - figname = '{}/{}.{}.png'.format(plots_dir, casename, varname) - title = '{} NH (r), SH (k)\n{}'.format(plot_title, casename) - timeseries_analysis_plot(config, [var_nh, var_sh], N_movavg, title, - xlabel, units, figname, + figureName = '{}/{}.{}.png'.format(plotsDirectory, mainRunName, + variableName) + title = '{} NH (r), SH (k)\n{}'.format(plotTitle, mainRunName) + timeseries_analysis_plot(config, [varNH, varSH], + movingAveragePoints, title, + xLabel, units, figureName, lineStyles=['r-', 'k-'], lineWidths=[1.2, 1.2], - title_font_size=title_font_size) + titleFontSize=titleFontSize) else: raise ValueError( - 'varname variable {} not supported for plotting'.format( - varname)) + 'variable {} is not supported for plotting'.format( + variableName)) -def replicate_cycle(ds, ds_toreplicate): - dsshift = ds_toreplicate.copy() - shiftT = ((dsshift.Time.max() - dsshift.Time.min()) + - (dsshift.Time.isel(Time=1) - dsshift.Time.isel(Time=0))) - startIndex = int(np.floor((ds.Time.min()-ds_toreplicate.Time.min())/shiftT)) - endIndex = int(np.ceil((ds.Time.max()-ds_toreplicate.Time.min())/shiftT)) - dsshift['Time'] = dsshift['Time'] + startIndex*shiftT +def replicate_cycle(ds, dsToReplicate): + dsShift = dsToReplicate.copy() + shiftT = ((dsShift.Time.max() - dsShift.Time.min()) + + (dsShift.Time.isel(Time=1) - dsShift.Time.isel(Time=0))) + startIndex = int(np.floor((ds.Time.min()-dsToReplicate.Time.min())/shiftT)) + endIndex = int(np.ceil((ds.Time.max()-dsToReplicate.Time.min())/shiftT)) + dsShift['Time'] = dsShift['Time'] + startIndex*shiftT # replicate cycle: for cycleIndex in range(startIndex, endIndex): - dsnew = ds_toreplicate.copy() - dsnew['Time'] = dsnew['Time'] + (cycleIndex+1)*shiftT - dsshift = xr.concat([dsshift, dsnew], dim='Time') - # constrict replicated ds_short to same time dimension as ds_long: - dsshift = dsshift.sel(Time=ds.Time.values, method='nearest') - return dsshift + dsNew = dsToReplicate.copy() + dsNew['Time'] = dsNew['Time'] + (cycleIndex+1)*shiftT + dsShift = xr.concat([dsShift, dsNew], dim='Time') + # restrict the replicated dsShift to the same time dimension as ds: + dsShift = dsShift.sel(Time=ds.Time.values, method='nearest') + return dsShift diff --git a/mpas_analysis/shared/io/utility.py b/mpas_analysis/shared/io/utility.py index 5534cb344..a1f5703ec 100644 --- a/mpas_analysis/shared/io/utility.py +++ b/mpas_analysis/shared/io/utility.py @@ -2,16 +2,18 @@ """ IO utility functions -Phillip J.
Wolfram -10/25/2016 +Phillip J. Wolfram, Xylar Asay-Davis + +Last Modified: 02/02/2017 """ import glob + def paths(*args): - """ + """ Returns glob'd paths in list for arbitrary number of function arguments. - Note, each expanded set of paths is sorted. + Note, each expanded set of paths is sorted. Phillip J. Wolfram 10/25/2016 @@ -21,4 +23,24 @@ paths += sorted(glob.glob(aargs)) return paths + +def buildConfigFullPath(config, section, relativePathOption): + """ + Returns a full path from a base directory and a relative path + + `config` is an instance of a ConfigParser + + `section` is the name of a section in `config`, which must have an + option `baseDirectory` + + `relativePathOption` is the name of an option in `section` that + points to the name of a relative path within `baseDirectory` + + Xylar Asay-Davis + + Last Modified: 02/02/2017 + """ + return '{}/{}'.format(config.get(section, 'baseDirectory'), + config.get(section, relativePathOption)) + # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/mpas_analysis/shared/plot/plotting.py b/mpas_analysis/shared/plot/plotting.py index ee72a1834..41f330c5e 100644 --- a/mpas_analysis/shared/plot/plotting.py +++ b/mpas_analysis/shared/plot/plotting.py @@ -4,10 +4,48 @@ import matplotlib.colors as cols import numpy as np +""" +Plotting utilities, including routines for plotting: + * time series (and comparing with reference data sets) + * regridded horizontal fields (and comparing with reference data sets) + +Author: Xylar Asay-Davis + +Last Modified: 02/02/2017 +""" + def timeseries_analysis_plot(config, dsvalues, N, title, xlabel, ylabel, fileout, - lineStyles, lineWidths, title_font_size=None, + lineStyles, lineWidths, titleFontSize=None, figsize=(15,6), dpi=300): + """ + Plots the list of time series data sets and stores the result in an image
+ + config is an instance of ConfigParser + + dsvalues is a list of xarray DataSets to be plotted + + N is the numer of time points over which to perform a moving average + + title is a string with the title of the plot + + xlabel and ylabel are strings with axes labels + + fileout is the file name to be written + + lineStyles and lineWidths are lists of strings controling line style/width + + titleFontSize is the size of the title font + + figsize is the size of the figure in inches + + dpi is the number of dots per inch of the figure + + Author: Xylar Asay-Davis + + Last Modified: 02/02/2017 + """ plt.figure(figsize=figsize, dpi=dpi) for dsIndex in range(len(dsvalues)): @@ -17,13 +55,13 @@ def timeseries_analysis_plot(config, dsvalues, N, title, xlabel, ylabel, fileout mean = pd.Series.rolling(dsvalue.to_pandas(), N, center=True).mean() mean.plot(style=lineStyles[dsIndex], lw=lineWidths[dsIndex]) - if title_font_size is None: - title_font_size = config.get('plot', 'title_font_size') + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') - axis_font = {'size':config.get('plot', 'axis_font_size')} - title_font = {'size': title_font_size, - 'color':config.get('plot', 'title_font_color'), - 'weight':config.get('plot', 'title_font_weight')} + axis_font = {'size':config.get('plot', 'axisFontSize')} + title_font = {'size': titleFontSize, + 'color':config.get('plot', 'titleFontColor'), + 'weight':config.get('plot', 'titleFontWeight')} if (title != None): plt.title(title, **title_font) if (xlabel != None): @@ -57,20 +95,64 @@ def plot_polar_comparison( obsTitle = "Observations", diffTitle = "Model-Observations", cbarlabel = "units", - title_font_size = None, + titleFontSize = None, figsize = (8,22), dpi = 300): + """ + Plots a data set around either the north or south pole. 
+ + config is an instance of ConfigParser + + Lons and Lats are the longitude and latitude arrays + + modelArray and obsArray are the model and observational data sets + + diffArray is the difference between modelArray and obsArray + + cmapModelObs is the colormap of model and observations + + clevsModelObs are contour values for model and observations + + cmapDiff is the colormap of the difference + + clevsDiff are contour values for the difference + + fileout is the file name to be written + + title is a string with the subtitle of the plot + + plotProjection is a Basemap projection for the plot + + modelTitle is the title of the model plot + + obsTitle is the title of the observations plot + + diffTitle is the title of the difference plot + + cbarlabel is the label on the colorbar + + titleFontSize is the size of the title font + + figsize is the size of the figure in inches + + dpi is the number of dots per inch of the figure + + Author: Xylar Asay-Davis + + Last Modified: 02/02/2017 + """ + # set up figure fig = plt.figure(figsize=figsize, dpi=dpi) if (title is not None): - if title_font_size is None: - title_font_size = config.get('plot', 'title_font_size') - title_font = {'size': title_font_size, - 'color':config.get('plot', 'title_font_color'), - 'weight':config.get('plot', 'title_font_weight')} + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + title_font = {'size': titleFontSize, + 'color':config.get('plot', 'titleFontColor'), + 'weight':config.get('plot', 'titleFontWeight')} fig.suptitle(title, y=0.95, **title_font) - axis_font = {'size':config.get('plot', 'axis_font_size')} + axis_font = {'size':config.get('plot', 'axisFontSize')} m = Basemap(projection=plotProjection,boundinglat=latmin,lon_0=lon0,resolution='l') x, y = m(Lons, Lats) # compute map proj coordinates @@ -137,28 -219,69 @@ def plot_global_comparison( obsTitle = "Observations", diffTitle = "Model-Observations", cbarlabel = "units", - title_font_size = None, + titleFontSize = None, figsize = (8,12), dpi = 300): + """ + Plots a data set as a longitude/latitude map.
+ + config is an instance of ConfigParser + + Lons and Lats are the longitude and latitude arrays + + modelArray and obsArray are the model and observational data sets + + diffArray is the difference between modelArray and obsArray + + cmapModelObs is the colormap of model and observations + + clevsModelObs are contour values for model and observations + + cmapDiff is the colormap of the difference + + clevsDiff are contour values for the difference + + fileout is the file name to be written + + title is a string with the subtitle of the plot + + modelTitle is the title of the model plot + + obsTitle is the title of the observations plot + + diffTitle is the title of the difference plot + + cbarlabel is the label on the colorbar + + titleFontSize is the size of the title font + + figsize is the size of the figure in inches + + dpi is the number of dots per inch of the figure + + Author: Xylar Asay-Davis + + Last Modified: 02/02/2017 + """ # set up figure fig = plt.figure(figsize=figsize, dpi=dpi) if (title is not None): - if title_font_size is None: - title_font_size = config.get('plot', 'title_font_size') - title_font = {'size': title_font_size, - 'color':config.get('plot', 'title_font_color'), - 'weight':config.get('plot', 'title_font_weight')} + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + title_font = {'size': titleFontSize, + 'color':config.get('plot', 'titleFontColor'), + 'weight':config.get('plot', 'titleFontWeight')} fig.suptitle(title, y=0.95, **title_font) - axis_font = {'size':config.get('plot', 'axis_font_size')} - + axis_font = {'size':config.get('plot', 'axisFontSize')} + m = Basemap(projection='cyl',llcrnrlat=-85,urcrnrlat=86,llcrnrlon=-180,urcrnrlon=181,resolution='l') #m = Basemap(projection='robin',lon_0=200,resolution='l') # this doesn't work because lons are -180 to 180..
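+    # Note: BoundaryNorm below maps values falling within each interval of clevsModelObs (or clevsDiff) onto successive colors of the corresponding colormap, so the filled contours and colorbars share the same discrete levels.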
x, y = m(Lons, Lats) # compute map proj coordinates normModelObs = cols.BoundaryNorm(clevsModelObs, cmapModelObs.N) normDiff = cols.BoundaryNorm(clevsDiff, cmapDiff.N) - + plt.subplot(3,1,1) plt.title(modelTitle, y=1.06, **axis_font) m.drawcoastlines() @@ -176,19 +299,19 @@ def plot_global_comparison( m.drawcoastlines() m.fillcontinents(color='grey',lake_color='white') m.drawparallels(np.arange(-80.,80.,20.),labels=[True,False,False,False]) - m.drawmeridians(np.arange(-180.,180.,40.),labels=[False,False,False,True]) + m.drawmeridians(np.arange(-180.,180.,40.),labels=[False,False,False,True]) cs = m.contourf(x,y,obsArray,cmap=cmapModelObs,norm=normModelObs,spacing='uniform',levels=clevsModelObs,extend='both') cbar = m.colorbar(cs,location='right',pad="5%",spacing='uniform',ticks=clevsModelObs,boundaries=clevsModelObs) #cbar = m.colorbar(cs,location='right',pad="5%",spacing='uniform',extendfrac='auto', # extendrect='True',ticks=clevsModelObs, boundaries=clevsModelObs) cbar.set_label(cbarlabel) - + plt.subplot(3,1,3) plt.title(diffTitle, y=1.06, **axis_font) m.drawcoastlines() m.fillcontinents(color='grey',lake_color='white') m.drawparallels(np.arange(-80.,80.,20.),labels=[True,False,False,False]) - m.drawmeridians(np.arange(-180.,180.,40.),labels=[False,False,False,True]) + m.drawmeridians(np.arange(-180.,180.,40.),labels=[False,False,False,True]) cs = m.contourf(x,y,diffArray,cmap=cmapDiff,norm=normDiff,spacing='uniform',levels=clevsDiff,extend='both') cbar = m.colorbar(cs,location='right',pad="5%",spacing='uniform',ticks=clevsDiff,boundaries=clevsModelObs) #cbar = m.colorbar(cs,location='right',pad="5%",spacing='uniform',extendfrac='auto', diff --git a/mpas_analysis/test/test_run_analysis_utils.py b/mpas_analysis/test/test_run_analysis_utils.py new file mode 100644 index 000000000..abec13c0e --- /dev/null +++ b/mpas_analysis/test/test_run_analysis_utils.py @@ -0,0 +1,143 @@ +""" +Unit tests for utility functions in run_analysis + +Xylar Asay-Davis +02/03/2017 +""" + +import pytest +from mpas_analysis.test import TestCase +from run_analysis import checkGenerate +from mpas_analysis.configuration.MpasAnalysisConfigParser \ + import MpasAnalysisConfigParser + + +class TestRunAnalysisUtils(TestCase): + + def test_checkGenerate(self): + + def doTest(generate, expectedResults): + config = MpasAnalysisConfigParser() + config.add_section('output') + config.set('output', 'generate', generate) + for analysisName in expectedResults: + expectedResult = expectedResults[analysisName] + result = checkGenerate( + config, analysisName=analysisName, + mpasCore=cores[analysisName], + analysisCategory=categories[analysisName]) + self.assertEqual(result, expectedResult) + + # Comments from config.template about how generate works: + # + # a list of analyses to generate. Valid names are: + # 'timeSeriesOHC', 'timeSeriesSST', 'regriddedSST', + # 'regriddedSSS', 'regriddedMLD', 'timeSeriesSeaIceAreaVol', + # 'regriddedSeaIceConcThick' + # the following shortcuts exist: + # 'all' -- all analyses will be run + # 'all_timeSeries' -- all time-series analyses will be run + # 'all_regriddedHorizontal' -- all analyses involving regridded + # horizontal fields will be run + # 'all_ocean' -- all ocean analyses will be run + # 'all_seaIce' -- all sea-ice analyses will be run + # 'no_timeSeriesOHC' -- skip 'timeSeriesOHC' (and similarly with the + # other analyses). + # 'no_ocean', 'no_timeSeries', etc. 
-- in analogy to 'all_*', skip + # the given category of analysis + # an equivalent syntax can be used on the command line to override this + # option: + # ./run_analysis.py config.analysis --generate \ + # all,no_ocean,all_timeSeries + + cores = {'timeSeriesOHC': 'ocean', + 'timeSeriesSST': 'ocean', + 'timeSeriesNino34': 'ocean', + 'timeSeriesMHT': 'ocean', + 'timeSeriesMOC': 'ocean', + 'regriddedSST': 'ocean', + 'regriddedMLD': 'ocean', + 'regriddedSSS': 'ocean', + 'timeSeriesSeaIceAreaVol': 'seaIce', + 'regriddedSeaIceConcThick': 'seaIce'} + + categories = {'timeSeriesOHC': 'timeSeries', + 'timeSeriesSST': 'timeSeries', + 'timeSeriesNino34': 'timeSeries', + 'timeSeriesMHT': 'timeSeries', + 'timeSeriesMOC': 'timeSeries', + 'regriddedSST': 'regriddedHorizontal', + 'regriddedMLD': 'regriddedHorizontal', + 'regriddedSSS': 'regriddedHorizontal', + 'timeSeriesSeaIceAreaVol': 'timeSeries', + 'regriddedSeaIceConcThick': 'regriddedHorizontal'} + + # test 'all' + expectedResults = {} + for analysisName in cores: + expectedResults[analysisName] = True + doTest("['all']", expectedResults) + + # test 'all_' and ['all', 'no_'] + for category in set(categories.values()): + expectedResults = {} + for analysisName in categories: + expectedResults[analysisName] = \ + (categories[analysisName] == category) + doTest("['all_{}']".format(category), expectedResults) + + expectedResults = {} + for analysisName in categories: + expectedResults[analysisName] = \ + (categories[analysisName] != category) + doTest("['all', 'no_{}']".format(category), expectedResults) + + # test 'all_' and ['all', 'no_'] + for core in set(cores.values()): + expectedResults = {} + for analysisName in cores: + expectedResults[analysisName] = \ + (cores[analysisName] == core) + doTest("['all_{}']".format(core), expectedResults) + + expectedResults = {} + for analysisName in cores: + expectedResults[analysisName] = \ + (cores[analysisName] != core) + doTest("['all','no_{}']".format(core), expectedResults) + + # test each analysis individually + for analysisName in cores: + expectedResults = {} + for otherAnalysis in cores: + expectedResults[otherAnalysis] = \ + (analysisName == otherAnalysis) + doTest("['{}']".format(analysisName), expectedResults) + + # test a non-existent analysis + expectedResults = {} + for analysisName in cores: + expectedResults[analysisName] = False + doTest("['fakeAnalysis']", expectedResults) + + # test ['all', 'no_ocean', 'all_timeSeries'] + expectedResults = {} + for analysisName in cores: + expectedResults[analysisName] = True + for analysisName in cores: + if cores[analysisName] == 'ocean': + expectedResults[analysisName] = False + for analysisName in categories: + if categories[analysisName] == 'timeSeries': + expectedResults[analysisName] = True + doTest("['all', 'no_ocean', 'all_timeSeries']", expectedResults) + + # test ['all', 'no_timeSeriesOHC'] + expectedResults = {} + for analysisName in cores: + expectedResults[analysisName] = True + expectedResults['timeSeriesOHC'] = False + doTest("['all', 'no_timeSeriesOHC']", expectedResults) + + +# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/run_analysis.py b/run_analysis.py index c02848818..1fe3a4c90 100755 --- a/run_analysis.py +++ b/run_analysis.py @@ -1,16 +1,16 @@ #!/usr/bin/env python """ -Runs MPAS-Analysis via configuration file `config.analysis` specifying analysis -options. +Runs MPAS-Analysis via a configuration file (e.g. `config.analysis`) +specifying analysis options. Author: Xylar Asay-Davis, Phillip J. 
Wolfram -Last Modified: 12/06/2016 +Last Modified: 02/02/2017 """ import os -import sys import matplotlib as mpl +import argparse from mpas_analysis.configuration.MpasAnalysisConfigParser \ import MpasAnalysisConfigParser @@ -21,70 +21,120 @@ from mpas_analysis.sea_ice.variable_stream_map import seaIceStreamMap, \ seaIceVariableMap +from mpas_analysis.shared.io.utility import buildConfigFullPath -def path_existence(config, section, option, ignorestr=None): # {{{ - inpath = config.get(section, option) - if not (os.path.isdir(inpath) or os.path.isfile(inpath)): - # assumes that path locations of ignorestr won't return an error, e.g., - # ignorestr="none" is a key word to indicate the path or file is - # optional and is not needed - if inpath == ignorestr: - return False - errmsg = "Path %s not found. Exiting..." % inpath - raise SystemExit(errmsg) - return inpath # }}} +def checkPathExists(path): # {{{ + """ + Raise an exception if the given path does not exist. -def makedirs(inpath): # {{{ - if not os.path.exists(inpath): - os.makedirs(inpath) - return inpath # }}} + Author: Xylar Asay-Davis + Last Modified: 02/02/2017 + """ + if not (os.path.isdir(path) or os.path.isfile(path)): + raise OSError('Path {} not found'.format(path)) +# }}} + + +def makeDirectories(path): # {{{ + """ + Make the given path if it does not already exist. + + Returns the path unchanged. + + Author: Xylar Asay-Davis + Last Modified: 02/02/2017 + """ + + try: + os.makedirs(path) + except OSError: + pass + return path # }}} + + +def checkGenerate(config, analysisName, mpasCore, analysisCategory=None): + # {{{ + """ + determine if a particular analysis of a particular core and (optionally) + category should be generated. + + Author: Xylar Asay-Davis + Last Modified: 02/02/2017 + """ + generateList = config.getExpression('output', 'generate') + generate = False + for element in generateList: + if '_' in element: + (prefix, suffix) = element.split('_', 1) + else: + prefix = element + suffix = None + + if prefix == 'all': + if (suffix in [mpasCore, analysisCategory]) or (suffix is None): + generate = True + elif prefix == 'no': + if suffix in [analysisName, mpasCore, analysisCategory]: + generate = False + elif element == analysisName: + generate = True + + return generate # }}} def analysis(config): # {{{ # set default values of start and end dates for climotologies and # timeseries - if config.has_option('time', 'climo_yr1') and \ - config.has_option('time', 'climo_yr2'): + if config.has_option('climatology', 'startYear') and \ + config.has_option('climatology', 'endYear'): startDate = '{:04d}-01-01_00:00:00'.format( - config.getint('time', 'climo_yr1')) + config.getint('climatology', 'startYear')) endDate = '{:04d}-12-31_23:59:59'.format( - config.getint('time', 'climo_yr2')) + config.getint('climatology', 'endYear')) # use 'getWithDefaults' to set start and end dates without replacing # them if they already exist - config.getWithDefault('time', 'climo_start_date', startDate) - config.getWithDefault('time', 'climo_end_date', endDate) + config.getWithDefault('climatology', 'startDate', startDate) + config.getWithDefault('climatology', 'endDate', endDate) - if config.has_option('time', 'timeseries_yr1') and \ - config.has_option('time', 'timeseries_yr2'): + if config.has_option('timeSeries', 'startYear') and \ + config.has_option('timeSeries', 'endYear'): startDate = '{:04d}-01-01_00:00:00'.format( - config.getint('time', 'timeseries_yr1')) + config.getint('timeSeries', 'startYear')) endDate = 
'{:04d}-12-31_23:59:59'.format( - config.getint('time', 'timeseries_yr2')) + config.getint('timeSeries', 'endYear')) # use 'getWithDefaults' to set start and end dates without replacing - # them if they already exist - config.getWithDefault('time', 'timeseries_start_date', startDate) - config.getWithDefault('time', 'timeseries_end_date', endDate) + # them if they already exist + config.getWithDefault('timeSeries', 'startDate', startDate) + config.getWithDefault('timeSeries', 'endDate', endDate) # Checks on directory/files existence: - if config.get('case', 'ref_casename_v0') != 'None': - path_existence(config, 'paths', 'ref_archive_v0_ocndir') - path_existence(config, 'paths', 'ref_archive_v0_seaicedir') - - generate_seaice_timeseries = config.getboolean('seaice_timeseries', - 'generate') - seaice_compare_obs = config.getboolean('seaice_timeseries', - 'compare_with_obs') - generate_seaice_modelvsobs = config.getboolean('seaice_modelvsobs', - 'generate') - if (generate_seaice_timeseries and seaice_compare_obs) or \ - generate_seaice_modelvsobs: + if config.get('runs', 'preprocessedReferenceRunName') != 'None': + checkPathExists(config.get('oceanPreprocessedReference', + 'baseDirectory')) + checkPathExists(config.get('seaIcePreprocessedReference', + 'baseDirectory')) + + generateTimeSeriesSeaIce = checkGenerate( + config, analysisName='timeSeriesSeaIceAreaVol', mpasCore='seaIce', + analysisCategory='timeSeries') + compareTimeSeriesSeaIceWithObservations = config.getboolean( + 'timeSeriesSeaIceAreaVol', 'compareWithObservations') + generateRegriddedSeaIce = checkGenerate( + config, analysisName='regriddedSeaIceConcThick', mpasCore='seaIce', + analysisCategory='regriddedHorizontal') + + if ((generateTimeSeriesSeaIce and + compareTimeSeriesSeaIceWithObservations) or generateRegriddedSeaIce): # we will need sea-ice observations. Make sure they're there - for obsfile in ['obs_iceareaNH', 'obs_iceareaSH', 'obs_icevolNH', - 'obs_icevolSH']: - path_existence(config, 'seaIceData', obsfile, ignorestr='none') + baseDirectory = config.get('seaIceObservations', 'baseDirectory') + for observationName in ['areaNH', 'areaSH', 'volNH', 'volSH']: + fileName = config.get('seaIceObservations', observationName) + if fileName.lower() == 'none': + continue + checkPathExists('{}/{}'.format(baseDirectory, fileName)) - makedirs(config.get('paths', 'plots_dir')) + makeDirectories(buildConfigFullPath(config, 'output', 'plotsSubdirectory')) # choose the right rendering backend, depending on whether we're displaying # to the screen @@ -95,69 +145,79 @@ def analysis(config): # {{{ # analysis can only be imported after the right MPL renderer is selected # GENERATE OCEAN DIAGNOSTICS - if config.getboolean('ohc_timeseries', 'generate'): + if checkGenerate(config, analysisName='timeSeriesOHC', mpasCore='ocean', + analysisCategory='timeSeries'): print "" print "Plotting OHC time series..." from mpas_analysis.ocean.ohc_timeseries import ohc_timeseries ohc_timeseries(config, streamMap=oceanStreamMap, variableMap=oceanVariableMap) - if config.getboolean('sst_timeseries', 'generate'): + if checkGenerate(config, analysisName='timeSeriesSST', mpasCore='ocean', + analysisCategory='timeSeries'): print "" print "Plotting SST time series..." from mpas_analysis.ocean.sst_timeseries import sst_timeseries sst_timeseries(config, streamMap=oceanStreamMap, variableMap=oceanVariableMap) - if config.getboolean('nino34_timeseries', 'generate'): - print "" - print "Plotting Nino3.4 time series..."
- # from mpas_analysis.ocean.nino34_timeseries import nino34_timeseries - # nino34_timeseries(config) - - if config.getboolean('mht_timeseries', 'generate'): - print "" - print "Plotting Meridional Heat Transport (MHT)..." - # from mpas_analysis.ocean.mht_timeseries import mht_timeseries - # mht_timeseries(config) - - if config.getboolean('moc_timeseries', 'generate'): - print "" - print "Plotting Meridional Overturning Circulation (MOC)..." - # from mpas_analysis.ocean.moc_timeseries import moc_timeseries - # moc_timeseries(config) - - if config.getboolean('sst_modelvsobs', 'generate'): +# if checkGenerate(config, analysisName='timeSeriesNino34', +# mpasCore='ocean', analysisCategory='timeSeries'): +# print "" +# print "Plotting Nino3.4 time series..." +# from mpas_analysis.ocean.nino34_timeseries import nino34_timeseries +# nino34_timeseries(config) + +# if checkGenerate(config, analysisName='timeSeriesMHT', mpasCore='ocean', +# analysisCategory='timeSeries'): +# print "" +# print "Plotting Meridional Heat Transport (MHT)..." +# from mpas_analysis.ocean.mht_timeseries import mht_timeseries +# mht_timeseries(config) + +# if checkGenerate(config, analysisName='timeSeriesMOC', mpasCore='ocean', +# analysisCategory='timeSeries'): +# print "" +# print "Plotting Meridional Overturning Circulation (MOC)..." +# from mpas_analysis.ocean.moc_timeseries import moc_timeseries +# moc_timeseries(config) + + if checkGenerate(config, analysisName='regriddedSST', mpasCore='ocean', + analysisCategory='regriddedHorizontal'): print "" print "Plotting 2-d maps of SST climatologies..." from mpas_analysis.ocean.ocean_modelvsobs import ocn_modelvsobs ocn_modelvsobs(config, 'sst', streamMap=oceanStreamMap, variableMap=oceanVariableMap) - if config.getboolean('mld_modelvsobs', 'generate'): + if checkGenerate(config, analysisName='regriddedMLD', mpasCore='ocean', + analysisCategory='regriddedHorizontal'): print "" print "Plotting 2-d maps of MLD climatologies..." from mpas_analysis.ocean.ocean_modelvsobs import ocn_modelvsobs ocn_modelvsobs(config, 'mld', streamMap=oceanStreamMap, variableMap=oceanVariableMap) - if config.getboolean('sss_modelvsobs', 'generate'): + if checkGenerate(config, analysisName='regriddedSSS', mpasCore='ocean', + analysisCategory='regriddedHorizontal'): print "" print "Plotting 2-d maps of SSS climatologies..." from mpas_analysis.ocean.ocean_modelvsobs import ocn_modelvsobs ocn_modelvsobs(config, 'sss', streamMap=oceanStreamMap, variableMap=oceanVariableMap) - # GENERATE SEA-ICE DIAGNOSTICS - if config.getboolean('seaice_timeseries', 'generate'): + if checkGenerate(config, analysisName='timeSeriesSeaIceAreaVol', + mpasCore='seaIce', analysisCategory='timeSeries'): print "" print "Plotting sea-ice area and volume time series..." from mpas_analysis.sea_ice.timeseries import seaice_timeseries seaice_timeseries(config, streamMap=seaIceStreamMap, variableMap=seaIceVariableMap) - if config.getboolean('seaice_modelvsobs', 'generate'): + if checkGenerate(config, analysisName='regriddedSeaIceConcThick', + mpasCore='seaIce', + analysisCategory='regriddedHorizontal'): print "" print "Plotting 2-d maps of sea-ice concentration and thickness " \ "climatologies..." 
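The `checkGenerate` calls above replace the per-section `generate` booleans with a single `generate` list under `[output]`, resolved left to right so that later entries override earlier ones. Below is a minimal, self-contained sketch of that resolution using a plain Python list; `resolve_generate` and the example flags are illustrative only and not part of the package.

```python
# Illustrative sketch (not part of the package): same left-to-right resolution
# logic as checkGenerate in run_analysis.py, but taking a plain list instead
# of a config object.
def resolve_generate(generateList, analysisName, mpasCore, analysisCategory=None):
    generate = False
    for element in generateList:
        if '_' in element:
            prefix, suffix = element.split('_', 1)
        else:
            prefix, suffix = element, None
        if prefix == 'all':
            if (suffix in [mpasCore, analysisCategory]) or (suffix is None):
                generate = True
        elif prefix == 'no':
            if suffix in [analysisName, mpasCore, analysisCategory]:
                generate = False
        elif element == analysisName:
            generate = True
    return generate

# 'all' turns everything on, 'no_ocean' then turns the ocean analyses off, and
# 'all_timeSeries' turns time-series analyses (including ocean ones) back on.
flags = ['all', 'no_ocean', 'all_timeSeries']
assert resolve_generate(flags, 'timeSeriesOHC', 'ocean', 'timeSeries')
assert not resolve_generate(flags, 'regriddedSST', 'ocean', 'regriddedHorizontal')
assert resolve_generate(flags, 'regriddedSeaIceConcThick', 'seaIce',
                        'regriddedHorizontal')
```

These assertions match the expected results exercised in `test_run_analysis_utils.py` above.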
@@ -174,14 +234,27 @@ if __name__ == "__main__": - # process command line arguments and run analysis from configuration - if len(sys.argv) <= 1: - print "usage: %s []" % sys.argv[0] - exit(1) + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-g", "--generate", dest="generate", + help="A list of analysis modules to generate " + "(nearly identical to the generate option in the config file).", + metavar="ANALYSIS1[,ANALYSIS2,ANALYSIS3,...]") + parser.add_argument('configFiles', metavar='CONFIG', + type=str, nargs='+', help='config file') + args = parser.parse_args() - configFileNames = sys.argv[1:] config = MpasAnalysisConfigParser() - config.read(configFileNames) + config.read(args.configFiles) + + if args.generate: + # overwrite the 'generate' in config with a string that parses to + # a list of strings + generateList = args.generate.split(',') + generateString = ', '.join(["'{}'".format(element) + for element in generateList]) + generateString = '[{}]'.format(generateString) + config.set('output', 'generate', generateString) analysis(config)
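The `--generate` handling above reformats a comma-separated command-line value into the same bracketed-list string that the config file's `generate` option uses, so `getExpression` can parse either source identically. A short sketch of that conversion, with a purely illustrative flag value:

```python
# Hypothetical --generate value; the reformatting mirrors the __main__ block above.
generateArg = 'all,no_ocean,all_timeSeries'
generateList = generateArg.split(',')
generateString = '[{}]'.format(', '.join(["'{}'".format(element)
                                          for element in generateList]))
assert generateString == "['all', 'no_ocean', 'all_timeSeries']"
# config.set('output', 'generate', generateString) would then override the
# config file's own 'generate' option before analysis(config) runs.
```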