configs/edison/job_script.edison.bash (16 changes: 9 additions & 7 deletions)
@@ -8,7 +8,7 @@
##SBATCH --partition=debug
# change number of nodes to change the number of parallel tasks
# (anything between 1 and the total number of tasks to run)
#SBATCH --nodes=10
#SBATCH --nodes=1
#SBATCH --time=1:00:00
#SBATCH --account=acme
#SBATCH --job-name=mpas_analysis
@@ -23,7 +23,6 @@ export OMP_NUM_THREADS=1
module unload python python/base
module use /global/project/projectdirs/acme/software/modulefiles/all
module load python/anaconda-2.7-acme
export PATH=/global/homes/z/zender/bin_${NERSC_HOST}:${PATH}

# MPAS/ACME job to be analyzed, including paths to simulation data and
# observations. Change this name and path as needed
@@ -34,7 +33,9 @@ command_prefix="srun -N 1 -n 1"
# containing run_mpas_analysis
mpas_analysis_dir="."
# one parallel task per node by default
parallel_task_count=$SLURM_JOB_NUM_NODES
parallel_task_count=12
# ncclimo can run with 1 (serial) or 12 (bck) threads
ncclimo_mode=bck

if [ ! -f $run_config_file ]; then
echo "File $run_config_file not found!"
@@ -58,12 +59,13 @@ cat <<EOF > $job_config_file
# the number of parallel tasks (1 means tasks run in serial, the default)
parallelTaskCount = $parallel_task_count

# Prefix on the commnd line before a parallel task (e.g. 'srun -n 1 python')
# Default is no prefix (run_mpas_analysis is executed directly)
commandPrefix = $command_prefix
# the parallelism mode in ncclimo ("serial" or "bck")
# Set this to "bck" (background parallelism) if running on a machine that can
# handle 12 simultaneous processes, one for each monthly climatology.
ncclimoParallelMode = $ncclimo_mode

EOF

$mpas_analysis_dir/run_mpas_analysis $run_config_file \
$command_prefix $mpas_analysis_dir/run_mpas_analysis $run_config_file \
$job_config_file
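
As the new comment in the script notes, ncclimo runs either serially or with 12 background processes, one per monthly climatology. A minimal sketch, not part of this change, of picking the mode from the node's core count (the getconf call and the 12-core threshold are my own illustration):

    # use background parallelism only if the node can host 12 simultaneous
    # ncclimo processes, one per monthly climatology; otherwise fall back to serial
    if [ "$(getconf _NPROCESSORS_ONLN)" -ge 12 ]; then
        ncclimo_mode=bck
    else
        ncclimo_mode=serial
    fi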

configs/job_script.default.bash (11 changes: 7 additions & 4 deletions)
@@ -11,6 +11,8 @@ mpas_analysis_dir="."
# the number of parallel tasks (anything between 1 and the total number
# of tasks to run)
parallel_task_count=8
# ncclimo can run with 1 (serial) or 12 (bck) threads
ncclimo_mode=bck

if [ ! -f $run_config_file ]; then
echo "File $run_config_file not found!"
@@ -33,13 +35,14 @@ cat <<EOF > $job_config_file
# the number of parallel tasks (1 means tasks run in serial, the default)
parallelTaskCount = $parallel_task_count

# Prefix on the commnd line before a parallel task (e.g. 'srun -n 1 python')
# Default is no prefix (run_mpas_analysis is executed directly)
commandPrefix = $command_prefix
# the parallelism mode in ncclimo ("serial" or "bck")
# Set this to "bck" (background parallelism) if running on a machine that can
# handle 12 simultaneous processes, one for each monthly climatology.
ncclimoParallelMode = $ncclimo_mode

EOF

$mpas_analysis_dir/run_mpas_analysis $run_config_file \
$command_prefix $mpas_analysis_dir/run_mpas_analysis $run_config_file \
$job_config_file

# commend this out if you want to keep the config file, e.g. for debugging
configs/lanl/job_script.lanl.bash (17 changes: 11 additions & 6 deletions)
@@ -2,7 +2,7 @@

# change number of nodes to change the number of parallel tasks
# (anything between 1 and the total number of tasks to run)
#SBATCH --nodes=10
#SBATCH --nodes=1
#SBATCH --time=1:00:00
#SBATCH --account=climateacme
#SBATCH --job-name=mpas_analysis
@@ -12,6 +12,8 @@

cd $SLURM_SUBMIT_DIR # optional, since this is the default behavior

export OMP_NUM_THREADS=1

module unload python
module use /usr/projects/climate/SHARED_CLIMATE/modulefiles/all/
module load python/anaconda-2.7-climate
@@ -25,7 +27,9 @@ command_prefix=""
# containing run_mpas_analysis
mpas_analysis_dir="."
# one parallel task per node by default
parallel_task_count=$SLURM_JOB_NUM_NODES
parallel_task_count=12
# ncclimo can run with 1 (serial) or 12 (bck) threads
ncclimo_mode=bck

if [ ! -f $run_config_file ]; then
echo "File $run_config_file not found!"
@@ -49,12 +53,13 @@ cat <<EOF > $job_config_file
# the number of parallel tasks (1 means tasks run in serial, the default)
parallelTaskCount = $parallel_task_count

# Prefix on the commnd line before a parallel task (e.g. 'srun -n 1 python')
# Default is no prefix (run_mpas_analysis is executed directly)
commandPrefix = $command_prefix
# the parallelism mode in ncclimo ("serial" or "bck")
# Set this to "bck" (background parallelism) if running on a machine that can
# handle 12 simultaneous processes, one for each monthly climatology.
ncclimoParallelMode = $ncclimo_mode

EOF

$mpas_analysis_dir/run_mpas_analysis $run_config_file \
$command_prefix $mpas_analysis_dir/run_mpas_analysis $run_config_file \
$job_config_file
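
The LANL script now exports OMP_NUM_THREADS=1, as the Edison script already did. A plausible reading, not stated in this change, is that with up to 12 simultaneous processes sharing a single node, each process is held to one OpenMP thread so the cores are not oversubscribed; a hypothetical variant that sizes the thread count from the hardware instead would look like:

    # hypothetical alternative: divide the node's cores among up to 12
    # simultaneous processes rather than hard-coding one thread each
    cores=$(getconf _NPROCESSORS_ONLN)
    export OMP_NUM_THREADS=$(( cores >= 12 ? cores / 12 : 1 ))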

configs/olcf/job_script.olcf.bash (15 changes: 9 additions & 6 deletions)
@@ -6,7 +6,7 @@
##PBS -q debug
# change number of nodes to change the number of parallel tasks
# (anything between 1 and the total number of tasks to run)
#PBS -l nodes=10
#PBS -l nodes=1
#PBS -l walltime=1:00:00
#PBS -A cli115
#PBS -N mpas_analysis
@@ -28,7 +28,9 @@ command_prefix="aprun -b -N 1 -n 1"
# containing run_mpas_analysis
mpas_analysis_dir="."
# one parallel task per node by default
parallel_task_count=$PBS_NUM_NODES
parallel_task_count=12
# ncclimo can run with 1 (serial) or 12 (bck) threads
ncclimo_mode=bck

if [ ! -f $run_config_file ]; then
echo "File $run_config_file not found!"
@@ -52,12 +54,13 @@ cat <<EOF > $job_config_file
# the number of parallel tasks (1 means tasks run in serial, the default)
parallelTaskCount = $parallel_task_count

# Prefix on the commnd line before a parallel task (e.g. 'srun -n 1 python')
# Default is no prefix (run_mpas_analysis is executed directly)
commandPrefix = $command_prefix
# the parallelism mode in ncclimo ("serial" or "bck")
# Set this to "bck" (background parallelism) if running on a machine that can
# handle 12 simultaneous processes, one for each monthly climatology.
ncclimoParallelMode = $ncclimo_mode

EOF

$mpas_analysis_dir/run_mpas_analysis $run_config_file \
$command_prefix $mpas_analysis_dir/run_mpas_analysis $run_config_file \
$job_config_file
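
For reference, the machine-specific scripts are submitted through each scheduler in the usual way, while the generic script is run directly; these commands are standard SLURM/PBS usage rather than part of this change:

    sbatch configs/edison/job_script.edison.bash   # Edison (SLURM)
    sbatch configs/lanl/job_script.lanl.bash       # LANL (SLURM)
    qsub configs/olcf/job_script.olcf.bash         # OLCF (PBS)
    bash configs/job_script.default.bash           # generic, no scheduler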

docs/api.rst (3 changes: 1 addition & 2 deletions)
@@ -13,9 +13,7 @@ Top-level script: run_mpas_analysis

run_mpas_analysis.update_generate
run_mpas_analysis.run_parallel_tasks
run_mpas_analysis.launch_tasks
run_mpas_analysis.wait_for_task
run_mpas_analysis.is_running
run_mpas_analysis.build_analysis_list
run_mpas_analysis.determine_analyses_to_generate
run_mpas_analysis.run_analysis
@@ -34,6 +32,7 @@ Base Class

AnalysisTask
AnalysisTask.setup_and_check
AnalysisTask.run_analysis
AnalysisTask.run
AnalysisTask.check_generate
AnalysisTask.check_analysis_enabled
mpas_analysis/analysis_task_template.py (43 changes: 21 additions & 22 deletions)
@@ -19,7 +19,7 @@
5. add new analysis task to run_mpas_analysis under build_analysis_list:
analyses.append(<component>.MyTask(config, myArg='argValue'))
This will add a new object of the MyTask class to a list of analysis tasks
created in build_analysis_list. Later on in run_analysis, it will first
created in build_analysis_list. Later on in run_task, it will first
go through the list to make sure each task needs to be generated
(by calling check_generate, which is defined in AnalysisTask), then, will
call setup_and_check on each task (to make sure the appropriate AM is on
@@ -220,7 +220,7 @@ def setup_and_check(self): # {{{
self.endDate))

# For climatologies, update the start and end year based on the files
# that are actually available
# that are actually available
# If not analyzing climatologies, delete this line
changed, self.startYear, self.endYear, self.startDate, self.endDate = \
update_climatology_bounds_from_file_names(self.inputFiles,
@@ -234,19 +234,19 @@
# images should appear on the webpage.

# Note: because of the way parallel tasks are handled in MPAS-Analysis,
# we can't be sure that run() will be called (it might be launched
# as a completely separate process) so it is not safe to store a list
# of xml files from within run(). The recommended procedure is to
# create a list of XML files here during setup_and_check() and possibly
# use them during run()
# we can't be sure that run_task() will be called (it might be
# launched as a completely separate process) so it is not safe to store
# a list of xml files from within run_task(). The recommended
# procedure is to create a list of XML files here during
# setup_and_check() and possibly use them during run_task()

self.xmlFileNames = []

# we also show how to store file prefixes for later use in creating
# plots
self.filePrefixes = {}

# plotParameters is a list of parameters, a stand-ins for whatever
# plotParameters is a list of parameters, a stand-ins for whatever
# you might want to include in each plot name, for example, seasons or
# types of observation.
self.plotParameters = self.config.getExpression(self.taskName,
@@ -261,10 +261,9 @@
filePrefix))
self.filePrefixes[plotParameter] = filePrefix


# }}}

def run(self): # {{{
def run_task(self): # {{{
'''
The main method of the task that performs the analysis task.

@@ -275,9 +274,9 @@

# Add the main contents of the analysis task below

# No need to call AnalysisTask.run() because it doesn't do anything,
# so we don't call super(MyTask, self).run(), as we do for other
# methods above.
# No need to call AnalysisTask.run_task() because it doesn't do
# anything, so we don't call super(MyTask, self).run_task(), as we
# do for other methods above.

# Here is an example of a call to a local helper method (function),
# one for each of our plotParameters (e.g. seasons)
@@ -296,15 +295,15 @@ def run(self): # {{{
def _make_plot(self, plotParameter, optionalArgument=None): # {{{
'''
Make a simple plot

Parameters
----------
plotParameter : str
The name of a parameter that is specific to this plot

optionalArgument : <type_goes_here>, optional
An optional argument

<Performs my favorite subtask>
'''

@@ -320,20 +319,20 @@ def _make_plot(self, plotParameter, optionalArgument=None): # {{{
# get the file name based on the plot parameter
filePrefix = self.filePrefixes[plotParameter]
outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)

# make the plot
x = numpy.linspace(0, 1, 1000)
plt.plot(x, x**2)
# save the plot to the output file
plt.savefig(outFileName)

# here's an example of how you would create an XML file for this plot
# with the appropriate entries. Some notes:
# * Gallery groups typically represent all the analysis from a task,
# or sometimes from multiple tasks
# * A gallery might be for just for one set of observations, one
# season, etc., depending on what makes sense
# * Within each gallery, there is one plot for each value in
# * Within each gallery, there is one plot for each value in
# 'plotParameters', with a corresponding caption and short thumbnail
# description
caption = 'Plot of x^2 with plotParamter: {}'.format(plotParameter)
@@ -350,8 +349,8 @@ def _make_plot(self, plotParameter, optionalArgument=None): # {{{
imageDescription=caption,
imageCaption=caption)

#

#
# }}}

# }}}