diff --git a/.gitignore b/.gitignore index 11427a695..26937dc4a 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,95 @@ *.png # Each tool should also have it's own .gitignore file that ignores the build files for that tool. + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# IPython Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# dotenv +.env + +# virtualenv +venv/ +ENV/ + +# Spyder project settings +.spyderproject + +# Rope project settings +.ropeproject + +.DS_Store \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..401bf749c --- /dev/null +++ b/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2013-2019, Los Alamos National Security, LLC (LANS) (Ocean: LA-CC-13-047; +Land Ice: LA-CC-13-117) and the University Corporation for Atmospheric Research (UCAR). + +All rights reserved. + +LANS is the operator of the Los Alamos National Laboratory under Contract No. +DE-AC52-06NA25396 with the U.S. Department of Energy. UCAR manages the National +Center for Atmospheric Research under Cooperative Agreement ATM-0753581 with the +National Science Foundation. The U.S. Government has rights to use, reproduce, +and distribute this software. NO WARRANTY, EXPRESS OR IMPLIED IS OFFERED BY +LANS, UCAR OR THE GOVERNMENT AND NONE OF THEM ASSUME ANY LIABILITY FOR THE USE +OF THIS SOFTWARE. If software is modified to produce derivative works, such +modified software should be clearly marked, so as not to confuse it with the +version available from LANS and UCAR. + +Additionally, redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1) Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2) Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +3) None of the names of LANS, UCAR or the names of its contributors, if any, may +be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/atmosphere/README b/atmosphere/README new file mode 100644 index 000000000..ee68f7148 --- /dev/null +++ b/atmosphere/README @@ -0,0 +1 @@ +Readme file for MPAS-Tools atmosphere directory. diff --git a/conda_package/docs/Makefile b/conda_package/docs/Makefile new file mode 100644 index 000000000..1b24251fe --- /dev/null +++ b/conda_package/docs/Makefile @@ -0,0 +1,24 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = mpas_tools +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +clean: + rm -rf *obs_table.rst generated obs + @$(SPHINXBUILD) -M clean "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/conda_package/docs/api.rst b/conda_package/docs/api.rst new file mode 100644 index 000000000..b737d8056 --- /dev/null +++ b/conda_package/docs/api.rst @@ -0,0 +1,68 @@ +############# +API reference +############# + +This page provides an auto-generated summary of the MPAS mesh-tools API. For +more details and examples, refer to the relevant chapters in the main part of +the documentation. + +MPAS mesh tools +=============== + +.. currentmodule:: mpas_tools.planar_hex + +.. autosummary:: + :toctree: generated/ + + make_planar_hex_mesh + +.. currentmodule:: mpas_tools.translate + +.. autosummary:: + :toctree: generated/ + + translate + + +.. currentmodule:: mpas_tools.conversion + +.. autosummary:: + :toctree: generated/ + + convert + cull + mask + +.. currentmodule:: mpas_tools.merge_grids + +.. autosummary:: + :toctree: generated/ + + merge_grids + +.. currentmodule:: mpas_tools.split_grids + +.. autosummary:: + :toctree: generated/ + + split_grids + +.. currentmodule:: mpas_tools.io + +.. autosummary:: + :toctree: generated/ + + write_netcdf + + +Ocean Tools +=========== + +.. currentmodule:: mpas_tools.ocean.coastline_alteration + +.. autosummary:: + :toctree: generated/ + + add_critical_land_blockages + widen_transect_edge_masks + diff --git a/conda_package/docs/conf.py b/conda_package/docs/conf.py new file mode 100644 index 000000000..4968e1e79 --- /dev/null +++ b/conda_package/docs/conf.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# +# mpas_tools documentation build configuration file, created by +# sphinx-quickstart on Sat Mar 25 14:39:11 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
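+
+# Note: mpas_tools must be importable when these docs are built, because the
+# version and release strings below are taken from
+# mpas_tools.__version_info__ and mpas_tools.__version__.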
+ +import os +import mpas_tools + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ['sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.mathjax', + 'sphinx.ext.viewcode', + 'numpydoc'] + +autosummary_generate = True + +numpydoc_class_members_toctree = True +numpydoc_show_class_members = False + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +source_suffix = ['.rst'] +# source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'mpas_tools' +copyright = u'This software is open source software available under the BSD-3' \ + u'license. Copyright (c) 2019 Triad National Security, LLC. ' \ + u'All rights reserved. Copyright (c) 2019 Lawrence Livermore ' \ + u'National Security, LLC. All rights reserved. Copyright (c) ' \ + u'2019 UT-Battelle, LLC. All rights reserved.' +author = u'Xylar Asay-Davis, Doug Jacobsen, Michael Duda, Mark Petersen, ' \ + u'Matt Hoffman, Adridan Turner, Philip Wolfram' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '.'.join(str(vi) for vi in mpas_tools.__version_info__[0:2]) +# The full version, including alpha/beta/rc tags. +release = mpas_tools.__version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', + 'design_docs/template.md'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# + +# on_rtd is whether we are on readthedocs.org, this line of code grabbed from +# docs.readthedocs.org +on_rtd = os.environ.get('READTHEDOCS', None) == 'True' + +if not on_rtd: # only import and set the theme if we're building docs locally + import sphinx_rtd_theme + html_theme = 'sphinx_rtd_theme' + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. +htmlhelp_basename = 'mpas_tools_doc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'mpas_tools.tex', u'mpas_tools Documentation', + author, 'manual'), +] + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'mpas_tools', u'mpas_tools Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'mpas_tools', u'mpas_tools Documentation', + author, 'mpas_tools', 'One line description of project.', + 'Miscellaneous'), +] + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + 'python': ('https://docs.python.org/', None), + 'numpy': ('http://docs.scipy.org/doc/numpy/', None), + 'xarray': ('http://xarray.pydata.org/en/stable/', None)} + + +github_doc_root = 'https://github.com/rtfd/recommonmark/tree/master/doc/' diff --git a/conda_package/docs/environment.yml b/conda_package/docs/environment.yml new file mode 100644 index 000000000..8779e9135 --- /dev/null +++ b/conda_package/docs/environment.yml @@ -0,0 +1,19 @@ +name: mpas_tools_docs +channels: + - conda-forge +dependencies: + - python=3.7 + - pytest + - netcdf4 + - hdf5 + - libnetcdf + - numpy + - scipy + - xarray + - geometric_features + - pyevtk + - future + - backports.tempfile + - sphinx + - sphinx_rtd_theme + - numpydoc diff --git a/conda_package/docs/index.rst b/conda_package/docs/index.rst new file mode 100644 index 000000000..167ef1068 --- /dev/null +++ b/conda_package/docs/index.rst @@ -0,0 +1,15 @@ +mpas_tools +========== + +This repository houses geometric features relevant for climate science. + +.. 
toctree:: + :maxdepth: 2 + + api + +Indices and tables +================== + +* :ref:`genindex` + diff --git a/conda_package/mpas_tools/__init__.py b/conda_package/mpas_tools/__init__.py new file mode 100644 index 000000000..05286d486 --- /dev/null +++ b/conda_package/mpas_tools/__init__.py @@ -0,0 +1,2 @@ +__version_info__ = (0, 0, 4) +__version__ = '.'.join(str(vi) for vi in __version_info__) diff --git a/conda_package/mpas_tools/__main__.py b/conda_package/mpas_tools/__main__.py new file mode 100755 index 000000000..ffa7fa56a --- /dev/null +++ b/conda_package/mpas_tools/__main__.py @@ -0,0 +1,32 @@ +""" +MPAS mesh tools +""" + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import mpas_tools + +import argparse + + +def main(): + """ + Entry point for the main script ``mpas_tools`` + """ + + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('-v', '--version', + action='version', + version='mpas_tools {}'.format( + mpas_tools.__version__), + help="Show version number and exit") + + args = parser.parse_args() + + +if __name__ == "__main__": + main() + +# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python diff --git a/conda_package/mpas_tools/conversion.py b/conda_package/mpas_tools/conversion.py new file mode 100644 index 000000000..c8bd9b44c --- /dev/null +++ b/conda_package/mpas_tools/conversion.py @@ -0,0 +1,203 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import os +import xarray +import subprocess +from backports.tempfile import TemporaryDirectory +import shutil + +from mpas_tools.io import write_netcdf + + +def convert(dsIn, graphInfoFileName=None): + ''' + Use ``MpasMeshConverter.x`` to convert an input mesh to a valid MPAS + mesh that is fully compliant with the MPAS mesh specification. + https://mpas-dev.github.io/files/documents/MPAS-MeshSpec.pdf + + Parameters + ---------- + dsIn : ``xarray.Dataset`` + A data set to convert + + graphInfoFileName : str, optional + A file path (relative or absolute) where the graph file (typically + ``graph.info`` should be written out. By default, ``graph.info`` is + not saved. + + Returns + ------- + dsOut : ``xarray.Dataset`` + The MPAS mesh + ''' + + with TemporaryDirectory() as tempdir: + inFileName = '{}/mesh_in.nc'.format(tempdir) + write_netcdf(dsIn, inFileName) + + outFileName = '{}/mesh_out.nc'.format(tempdir) + + if graphInfoFileName is not None: + graphInfoFileName = os.path.abspath(graphInfoFileName) + + # go into the directory of the output file so the graph.info file ends + # up in the same place + owd = os.getcwd() + outDir = os.path.dirname(outFileName) + os.chdir(outDir) + subprocess.check_call(['MpasMeshConverter.x', inFileName, outFileName]) + os.chdir(owd) + + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + + if graphInfoFileName is not None: + shutil.copyfile('{}/graph.info'.format(outDir), + graphInfoFileName) + + return dsOut + + +def cull(dsIn, dsMask=None, dsInverse=None, dsPreserve=None, + graphInfoFileName=None): + ''' + Use ``MpasCellCuller.x`` to cull cells from a mesh based on the + ``cullCell`` field in the input file or DataSet and/or the provided masks. + ``cullCell``, dsMask and dsInverse are merged together so that the final + mask is the union of these 3. The preserve mask is then used to determine + where cells should *not* be culled. 
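+
+    A minimal sketch of typical usage (the file names here are hypothetical,
+    and ``MpasCellCuller.x`` must be on the system path):
+
+    .. code:: python
+
+        import xarray
+
+        from mpas_tools.conversion import cull
+
+        dsBase = xarray.open_dataset('base_mesh.nc')
+        dsLandMask = xarray.open_dataset('land_mask.nc')
+        dsCulled = cull(dsBase, dsMask=dsLandMask,
+                        graphInfoFileName='culled_graph.info')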
+ + Parameters + ---------- + dsIn : ``xarray.Dataset`` + A data set to cull, possibly with a ``cullCell`` field set to one where + cells should be removed + + dsMask : ``xarray.Dataset`` or list, optional + A data set (or data sets) with region masks that are 1 where cells + should be culled + + dsInverse : ``xarray.Dataset`` or list, optional + A data set (or data sets) with region masks that are 0 where cells + should be culled + + dsPreserve : ``xarray.Dataset`` or list, optional + A data set (or data sets) with region masks that are 1 where cells + should *not* be culled + + graphInfoFileName : str, optional + A file path (relative or absolute) where the graph file (typically + ``culled_graph.info`` should be written out. By default, + ``culled_graph.info`` is not saved. + + Returns + ------- + dsOut : ``xarray.Dataset`` + The culled mesh + + ''' + + with TemporaryDirectory() as tempdir: + inFileName = '{}/ds_in.nc'.format(tempdir) + write_netcdf(dsIn, inFileName) + outFileName = '{}/ds_out.nc'.format(tempdir) + + args = ['MpasCellCuller.x', inFileName, outFileName] + + if dsMask is not None: + if not isinstance(dsMask, list): + dsMask = [dsMask] + for index, ds in enumerate(dsMask): + fileName = '{}/mask{}.nc'.format(tempdir, index) + write_netcdf(ds, fileName) + args.extend(['-m', fileName]) + + if dsInverse is not None: + if not isinstance(dsInverse, list): + dsInverse = [dsInverse] + for index, ds in enumerate(dsInverse): + fileName = '{}/inverse{}.nc'.format(tempdir, index) + write_netcdf(ds, fileName) + args.extend(['-i', fileName]) + + if dsPreserve is not None: + if not isinstance(dsPreserve, list): + dsPreserve = [dsPreserve] + for index, ds in enumerate(dsPreserve): + fileName = '{}/preserve{}.nc'.format(tempdir, index) + write_netcdf(ds, fileName) + args.extend(['-p', fileName]) + + # go into the directory of the output file so the graph.info file ends + # up in the same place + + if graphInfoFileName is not None: + graphInfoFileName = os.path.abspath(graphInfoFileName) + + owd = os.getcwd() + outDir = os.path.dirname(outFileName) + os.chdir(outDir) + subprocess.check_call(args) + os.chdir(owd) + + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + + if graphInfoFileName is not None: + shutil.copyfile('{}/culled_graph.info'.format(outDir), + graphInfoFileName) + + return dsOut + + +def mask(dsMesh, fcMask=None, fcSeed=None, positiveLon=False): + ''' + Use ``MpasMaskCreator.x`` to create a set of region masks either from + mask feature collecitons or from seed points to be used to flood fill + + Parameters + ---------- + dsMesh : ``xarray.Dataset``, optional + An MPAS mesh on which the masks should be created + + fcMask : ``geometric_features.FeatureCollection``, optional + A feature collection containing features to use to create the mask + + fcSeed : ``geometric_features.FeatureCollection``, optional + A feature collection with points to use a seeds for a flood fill that + will create a mask of all cells connected to the seed points + + Returns + ------- + dsMask : ``xarray.Dataset`` + The masks + + ''' + + with TemporaryDirectory() as tempdir: + inFileName = '{}/mesh_in.nc'.format(tempdir) + write_netcdf(dsMesh, inFileName) + outFileName = '{}/mesh_out.nc'.format(tempdir) + + args = ['MpasMaskCreator.x', inFileName, outFileName] + + if fcMask is not None: + fileName = '{}/mask.geojson'.format(tempdir) + fcMask.to_geojson(fileName) + args.extend(['-f', fileName]) + + if fcSeed is not None: + fileName = '{}/seed.geojson'.format(tempdir) + fcSeed.to_geojson(fileName) 
+ args.extend(['-s', fileName]) + + if positiveLon: + args.append('--positive_lon') + + subprocess.check_call(args) + + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + + return dsOut diff --git a/conda_package/mpas_tools/io.py b/conda_package/mpas_tools/io.py new file mode 100644 index 000000000..e1298ef5d --- /dev/null +++ b/conda_package/mpas_tools/io.py @@ -0,0 +1,41 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy +import netCDF4 +from datetime import datetime +import sys + + +def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals, + format='NETCDF3_64BIT'): + '''Write an xarray Dataset with NetCDF4 fill values where needed''' + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + isNumeric = numpy.issubdtype(ds[variableName].dtype, numpy.number) + if isNumeric and numpy.any(numpy.isnan(ds[variableName])): + dtype = ds[variableName].dtype + for fillType in fillValues: + if dtype == numpy.dtype(fillType): + encodingDict[variableName] = \ + {'_FillValue': fillValues[fillType]} + break + else: + encodingDict[variableName] = {'_FillValue': None} + + update_history(ds) + + ds.to_netcdf(fileName, encoding=encodingDict, format=format) + + +def update_history(ds): + '''Add or append history to attributes of a data set''' + + thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + \ + " ".join(sys.argv[:]) + if 'history' in ds.attrs: + newhist = '\n'.join([thiscommand, ds.attrs['history']]) + else: + newhist = thiscommand + ds.attrs['history'] = newhist diff --git a/conda_package/mpas_tools/merge_grids.py b/conda_package/mpas_tools/merge_grids.py new file mode 100755 index 000000000..308aeb81f --- /dev/null +++ b/conda_package/mpas_tools/merge_grids.py @@ -0,0 +1,240 @@ +#!/usr/bin/env python +""" +Tool to merge 2 MPAS non-contiguous meshes together into a single file +""" + +import os +import sys +import json +import argparse + +from datetime import datetime + +from netCDF4 import Dataset + + +def parse_args(args=None): + parser = argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + + parser.add_argument('infile1', metavar='FILENAME1', + help='File name for first mesh to merge') + + parser.add_argument('infile2', metavar='FILENAME2', + help='File name for second mesh to merge') + + parser.add_argument('-o', dest='outfile', default='merged_mesh.nc', metavar='FILENAME', + help='The merged mesh file') + + return parser.parse_args(args) + + +def merge_grids(infile1=None, infile2=None, outfile=None, runner=None): + """ + Merges two MPAS non-contiguous meshes together into a single file + + Parameters + ---------- + infile1 : str + The file name for the first mesh to merge + + infile2 : str + The file name for the second mesh to merge + + outfile : str + The file name for the first mesh to merge + + runner : str, optional + The command to write into the global history attribute of the outfile + """ + now = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + if not runner: + runner = '{}.merge_grids(infile1={}, infile2={}, outfile={})'.format( + os.path.splitext(__file__)[0], infile1, infile2, outfile) + + print('Opening files to merge:\n {}\n {}'.format(infile1, infile2)) + print('Creating the merged mesh file: {}'.format(outfile)) + with Dataset(infile1) as nc_in1, Dataset(infile2) as nc_in2, \ + Dataset(outfile, 'w', format="NETCDF3_CLASSIC") as mesh: + nCells1 = 
nc_in1.dimensions['nCells'].size + nEdges1 = nc_in1.dimensions['nEdges'].size + nVertices1 = nc_in1.dimensions['nVertices'].size + + nCells2 = nc_in2.dimensions['nCells'].size + nEdges2 = nc_in2.dimensions['nEdges'].size + nVertices2 = nc_in2.dimensions['nVertices'].size + + if nc_in1.dimensions['vertexDegree'].size != nc_in2.dimensions['vertexDegree'].size: + raise ValueError("ERROR: The two files have different lengths of the " + "vertexDegree dimension.") + + mesh.createDimension('nCells', nCells1 + nCells2) + mesh.createDimension('nEdges', nEdges1 + nEdges2) + mesh.createDimension('nVertices', nVertices1 + nVertices2) + mesh.createDimension('TWO', 2) + mesh.createDimension('vertexDegree', nc_in1.dimensions['vertexDegree'].size) + if 'StrLen' in nc_in1.dimensions: + mesh.createDimension('StrLen', nc_in1.dimensions['StrLen'].size) + maxEdges = max(nc_in1.dimensions['maxEdges'].size, nc_in2.dimensions['maxEdges'].size) + mesh.createDimension('maxEdges', maxEdges) + mesh.createDimension('maxEdges2', maxEdges * 2) + + optionalDims = ('Time', 'nVertLevels', 'nVertInterfaces') + for dim in optionalDims: + if dim in nc_in1.dimensions and dim in nc_in2.dimensions: + if len(nc_in1.dimensions[dim]) != len(nc_in2.dimensions[dim]): + raise ValueError("ERROR: The two files have different lengths " + "of the {} dimension.".format(dim)) + if dim == 'Time': + mesh.createDimension('Time', size=None) # make unlimited dimension + else: + mesh.createDimension(dim, nc_in1.dimensions[dim].size) + + print('Merging variable:') + vars1 = set(nc_in1.variables) + vars2 = set(nc_in2.variables) + # only copy variables common to both files + for varname in (vars1 & vars2): + print(' {}'.format(varname)) + if nc_in1.variables[varname].dimensions \ + != nc_in2.variables[varname].dimensions: + raise ValueError("ERROR: Variable {} has different dimensions in " + "the two files.".format(varname)) + + theVar = nc_in1.variables[varname] + newVar = mesh.createVariable(varname, theVar.dtype, theVar.dimensions) + # (Assuming here that nCells, nEdges, and nVertices are never both in a variable) + # now assign value + if 'nCells' in theVar.dimensions: + tup1 = () + tup2 = () + tupMerge = () + for ind in range(len(theVar.dimensions)): + if theVar.dimensions[ind] == 'nCells': + tup1 += (slice(0, nCells1),) + tup2 += (slice(0, nCells2),) + tupMerge += (slice(nCells1, nCells1 + nCells2),) + else: + tup1 += (slice(None),) + tup2 += (slice(None),) + tupMerge += (slice(None),) + newVar[tup1] = nc_in1.variables[varname][tup1] + newVar[tupMerge] = nc_in2.variables[varname][tup2] + elif 'nEdges' in theVar.dimensions: + tup1 = () + tup2 = () + tupMerge = () + for ind in range(len(theVar.dimensions)): + if theVar.dimensions[ind] == 'nEdges': + tup1 += (slice(0, nEdges1),) + tup2 += (slice(0, nEdges2),) + tupMerge += (slice(nEdges1, nEdges1 + nEdges2),) + else: + tup1 += (slice(None),) + tup2 += (slice(None),) + tupMerge += (slice(None),) + newVar[tup1] = nc_in1.variables[varname][tup1] + newVar[tupMerge] = nc_in2.variables[varname][tup2] + elif 'nVertices' in theVar.dimensions: + tup1 = () + tup2 = () + tupMerge = () + for ind in range(len(theVar.dimensions)): + if theVar.dimensions[ind] == 'nVertices': + tup1 += (slice(0, nVertices1),) + tup2 += (slice(0, nVertices2),) + tupMerge += (slice(nVertices1, nVertices1 + nVertices2),) + else: + tup1 += (slice(None),) + tup2 += (slice(None),) + tupMerge += (slice(None),) + newVar[tup1] = nc_in1.variables[varname][tup1] + newVar[tupMerge] = nc_in2.variables[varname][tup2] + else: + # just take 
file 1's version + newVar[:] = theVar[:] + + # Indexes need adjusting: + if varname == "indexToCellID": + newVar[nCells1:] += nCells1 + elif varname == "indexToEdgeID": + newVar[nEdges1:] += nEdges1 + elif varname == "indexToVertexID": + newVar[nVertices1:] += nVertices1 + elif varname == "cellsOnEdge": + part2 = newVar[nEdges1:, :] + part2[part2 > 0] += nCells1 + newVar[nEdges1:, :] = part2 + elif varname == "edgesOnCell": + part2 = newVar[nCells1:, :] + part2[part2 > 0] += nEdges1 + newVar[nCells1:, :] = part2 + elif varname == "edgesOnEdge": + part2 = newVar[nEdges1:, :] + part2[part2 > 0] += nEdges1 + newVar[nEdges1:, :] = part2 + elif varname == "cellsOnCell": + part2 = newVar[nCells1:, :] + part2[part2 > 0] += nCells1 + newVar[nCells1:, :] = part2 + elif varname == "verticesOnCell": + part2 = newVar[nCells1:, :] + part2[part2 > 0] += nVertices1 + newVar[nCells1:, :] = part2 + elif varname == "verticesOnEdge": + part2 = newVar[nEdges1:, :] + part2[part2 > 0] += nVertices1 + newVar[nEdges1:, :] = part2 + elif varname == "edgesOnVertex": + part2 = newVar[nVertices1:, :] + part2[part2 > 0] += nEdges1 + newVar[nVertices1:, :] = part2 + elif varname == "cellsOnVertex": + part2 = newVar[nVertices1:, :] + part2[part2 > 0] += nCells1 + newVar[nVertices1:, :] = part2 + + attrToCopy = ("on_a_sphere", "sphere_radius", "is_periodic") + for attr in attrToCopy: + if attr in nc_in1.ncattrs() and attr in nc_in2.ncattrs(): + if nc_in1.getncattr(attr) == nc_in2.getncattr(attr): + mesh.setncattr(attr, nc_in1.getncattr(attr)) + else: + print( + "Warning: Value for '{0}' global attribute differs between " + "input files. '{0}' being skipped.".format(attr)) + else: + print("Warning: '{0}' global attribute not present in both input " + "files. '{0}' being skipped.".format(attr)) + + # Add merge info to allow exact splitting later + mesh.merge_point = json.dumps({'nCells': nCells1, + 'nEdges': nEdges1, + 'nVertices': nVertices1, + 'maxEdges1': nc_in1.dimensions['maxEdges'].size, + 'maxEdges2': nc_in2.dimensions['maxEdges'].size + }) + + run_command = "{}: {} \n".format(now, runner) + mesh.history = maybe_encode(run_command) + + print('Merge complete! Output file: {}.'.format(outfile)) + + +# NOTE: Python 2 and 3 string fun conflicting with NC_CHAR vs NC_STRING, see: +# https://github.com/Unidata/netcdf4-python/issues/529 +def maybe_encode(string, encoding='ascii'): + try: + return string.encode(encoding) + except UnicodeEncodeError: + return string + + +def main(): + arguments = parse_args() + arguments.runner = ' '.join(sys.argv[:]) + merge_grids(**vars(arguments)) + + +if __name__ == '__main__': + main() diff --git a/conda_package/mpas_tools/ocean/__init__.py b/conda_package/mpas_tools/ocean/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/conda_package/mpas_tools/ocean/coastline_alteration.py b/conda_package/mpas_tools/ocean/coastline_alteration.py new file mode 100644 index 000000000..5a654dda0 --- /dev/null +++ b/conda_package/mpas_tools/ocean/coastline_alteration.py @@ -0,0 +1,361 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy +import xarray + + +def add_critical_land_blockages(dsMask, dsBlockages): + ''' + Parameters + ---------- + dsMask : `xarray.Dataset` + The mask to which critical blockages should be added + dsBlockage : `xarray.Dataset` + The transect masks defining critical land regions that should block + ocean flow (e.g. 
the Antarctic Peninsula)
+
+    Returns
+    -------
+    dsMask : `xarray.Dataset`
+        The mask with critical blockages included
+    '''
+
+    dsMask = dsMask.copy()
+
+    nTransects = dsBlockages.sizes['nTransects']
+    for transectIndex in range(nTransects):
+        dsMask.regionCellMasks[:, 0] = numpy.maximum(
+            dsBlockages.transectCellMasks[:, transectIndex],
+            dsMask.regionCellMasks[:, 0])
+
+    return dsMask
+
+
+def widen_transect_edge_masks(dsMask, dsMesh, latitude_threshold=43.0):
+    '''
+    Widen transect masks at high latitudes by marking any cell with a
+    neighboring transect edge poleward of ``latitude_threshold`` in
+    ``transectCellMasks``.
+
+    Parameters
+    ----------
+    dsMask : `xarray.Dataset`
+        The data set with ``transectEdgeMasks`` and ``transectCellMasks`` to
+        be widened
+    dsMesh : `xarray.Dataset`
+        The MPAS mesh data set
+    latitude_threshold : float
+        Minimum latitude, in degrees, for transect widening
+
+    Returns
+    -------
+    dsMask : `xarray.Dataset`
+        The mask with transects widened at high latitudes
+    '''
+    latitude_threshold_radians = numpy.deg2rad(latitude_threshold)
+
+    dsMask = dsMask.copy()
+
+    maxEdges = dsMesh.sizes['maxEdges']
+
+    latMask = numpy.abs(dsMesh.latEdge) > latitude_threshold_radians
+
+    edgeMask = numpy.logical_and(
+        latMask, dsMask.transectEdgeMasks == 1)
+    for iEdge in range(maxEdges):
+        eoc = dsMesh.edgesOnCell[:, iEdge]-1
+        mask = numpy.logical_and(eoc >= 0,
+                                 edgeMask[eoc])
+        # cells with a neighboring transect edge should be masked to 1
+        dsMask['transectCellMasks'] = dsMask.transectCellMasks.where(
+            numpy.logical_not(mask), 1.)
+
+    return dsMask
+
+
+def add_land_locked_cells_to_mask(dsMask, dsMesh, latitude_threshold=43.0,
+                                  nSweeps=10):
+    '''
+    Find ocean cells that are land-locked, and alter the cell mask so that they
+    are counted as land cells.
+
+    Parameters
+    ----------
+    dsMask : ``xarray.Dataset``
+        A land-mask data set
+
+    dsMesh : ``xarray.Dataset``
+        MPAS Mesh data set
+
+    latitude_threshold : float, optional
+        Minimum latitude, in degrees, at which to search for land-locked cells
+
+    nSweeps : int, optional
+        Maximum number of sweeps to search for land-locked cells
+
+    Returns
+    -------
+    dsMask : ``xarray.Dataset``
+        A copy of the land-mask data set with land-locked cells added to the
+        mask for the first region
+    '''
+
+    dsMask = xarray.Dataset(dsMask)
+    dsMesh = dsMesh.copy(deep=True)
+
+    landMask = dsMask.regionCellMasks.max(dim='nRegions') > 0
+
+    dsMask['landMaskDiagnostic'] = xarray.where(landMask, 1, 0)
+
+    print("Running add_land_locked_cells_to_mask.py. 
Total number of cells: " + "{}".format(dsMesh.sizes['nCells'])) + + cellsOnCell = dsMesh.cellsOnCell - 1 + nEdgesOnCell = dsMesh.nEdgesOnCell + + nextCellsOnCell = cellsOnCell.copy(deep=True) + prevCellsOnCell = cellsOnCell.copy(deep=True) + for iEdgeOnCell in range(nextCellsOnCell.shape[1]): + iP1 = numpy.mod(iEdgeOnCell + 1, nEdgesOnCell) + nextCellsOnCell[:, iEdgeOnCell] = cellsOnCell[:, iP1] + iM1 = numpy.mod(iEdgeOnCell - 1, nEdgesOnCell) + prevCellsOnCell[:, iEdgeOnCell] = cellsOnCell[:, iM1] + + dsMesh['cellsOnCell'] = cellsOnCell + dsMesh['nextCellsOnCell'] = nextCellsOnCell + dsMesh['prevCellsOnCell'] = prevCellsOnCell + dsMesh['latCell'] = numpy.rad2deg(dsMesh.latCell) + dsMesh['lonCell'] = numpy.rad2deg(dsMesh.lonCell) + + landMask, removable = _remove_cells_with_isolated_edges1( + dsMask, dsMesh, landMask, latitude_threshold) + landMask = _remove_cells_with_isolated_edges2( + dsMask, dsMesh, landMask, removable, nSweeps) + oceanMask = _flood_fill(dsMask, dsMesh, landMask, removable) + landMask = _revert_cells_with_connected_edges( + dsMask, dsMesh, oceanMask, landMask, removable, nSweeps) + + return dsMask + + +def _remove_cells_with_isolated_edges1(dsMask, dsMesh, landMask, + latitude_threshold): + print("Step 1: Searching for land-locked cells. Remove cells that only " + "have isolated active edges.") + + landMaskNew = landMask.copy(deep=True) + + active = numpy.logical_not(landMask) + removable = numpy.logical_and( + numpy.abs(dsMesh.latCell) >= latitude_threshold, active) + + cellsOnCell = dsMesh.cellsOnCell + valid = numpy.logical_and(removable, cellsOnCell >= 0) + activeEdge = numpy.logical_and(valid, active[cellsOnCell]) + + nextCellsOnCell = dsMesh.nextCellsOnCell + valid = numpy.logical_and(removable, nextCellsOnCell >= 0) + activeNextEdge = numpy.logical_and(valid, active[nextCellsOnCell]) + + # which vertices have adjacent active edges on this cell? + activeAdjacentEdges = numpy.logical_and(activeEdge, activeNextEdge) + + # which removable cells have no pairs of adjacent active cells? + noActiveAdjacentEdges = numpy.logical_and( + removable, numpy.logical_not(numpy.any(activeAdjacentEdges, axis=1))) + + landMaskNew[noActiveAdjacentEdges] = 1 + landLockedCounter = numpy.count_nonzero(noActiveAdjacentEdges) + + dsMask.regionCellMasks[:, 0] = numpy.maximum(dsMask.regionCellMasks[:, 0], + 1*noActiveAdjacentEdges) + + dsMask.landMaskDiagnostic[noActiveAdjacentEdges] = 2 + + print(" Number of landLocked cells: {}".format(landLockedCounter)) + + return landMaskNew, removable + + +def _remove_cells_with_isolated_edges2(dsMask, dsMesh, landMask, removable, + nSweeps): + print("Step 2: Searching for land-locked cells. 
Remove cells that have " + "any isolated active edges.") + + cellsOnCell = dsMesh.cellsOnCell + nextCellsOnCell = dsMesh.nextCellsOnCell + prevCellsOnCell = dsMesh.prevCellsOnCell + + for iSweep in range(nSweeps): + landLockedCounter = 0 + landMaskNew = landMask.copy(deep=True) + + active = numpy.logical_not(landMask) + mask = numpy.logical_and(removable, active) + + valid = numpy.logical_and(mask, cellsOnCell >= 0) + activeEdge = numpy.logical_and(valid, active[cellsOnCell]) + valid = numpy.logical_and(mask, nextCellsOnCell >= 0) + activeNextEdge = numpy.logical_and(valid, active[nextCellsOnCell]) + valid = numpy.logical_and(mask, prevCellsOnCell >= 0) + activePrevEdge = numpy.logical_and(valid, active[prevCellsOnCell]) + + # an edge is land-locked if it is active but neither neighbor is active + landLockedEdges = numpy.logical_and( + activeEdge, + numpy.logical_not( + numpy.logical_or(activePrevEdge, activeNextEdge))) + + landLockedCells = numpy.any(landLockedEdges, axis=1) + + landLockedCounter = numpy.count_nonzero(landLockedCells) + if landLockedCounter > 0: + landMaskNew[landLockedCells] = 1 + dsMask.regionCellMasks[landLockedCells, 0] = 1 + dsMask.landMaskDiagnostic[landLockedCells] = 3 + + landMask = landMaskNew + print(" Sweep: {} Number of landLocked cells removed: {}".format( + iSweep + 1, landLockedCounter)) + if landLockedCounter == 0: + break + + return landMask + + +def _flood_fill(dsMask, dsMesh, landMask, removable): + print("Step 3: Perform flood fill, starting from open ocean.") + + # init flood fill to 0 for water, -1 for land, 1 for known open ocean + floodFill = xarray.where( + numpy.logical_and(removable, numpy.logical_not(landMask)), 0, -1) + + latCell = dsMesh.latCell + lonCell = dsMesh.lonCell + + cellsOnCell = dsMesh.cellsOnCell + + # North Pole + mask = latCell > 84.0 + openOceanMask = mask + + # Arctic + mask = numpy.logical_and( + numpy.logical_and(lonCell > 160.0, lonCell < 230.0), + latCell > 73.0) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # North Atlantic + mask = numpy.logical_and( + numpy.logical_and(lonCell > 315.0, lonCell < 340.0), + numpy.logical_and(latCell > 15.0, latCell < 45.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + mask = numpy.logical_and( + numpy.logical_and(lonCell > 290.0, lonCell < 300.0), + numpy.logical_and(latCell > 72.0, latCell < 75.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + mask = numpy.logical_and( + numpy.logical_and(lonCell > 0.0, lonCell < 10.0), + numpy.logical_and(latCell > 70.0, latCell < 75.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # North Pacific + mask = numpy.logical_and( + numpy.logical_and(lonCell > 150.0, lonCell < 225.0), + numpy.logical_and(latCell > 0.0, latCell < 45.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # South Atlantic + mask = numpy.logical_and( + numpy.logical_and(lonCell > 0.0, lonCell < 5.0), + numpy.logical_and(latCell > -60.0, latCell < 0.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # South Pacific + mask = numpy.logical_and( + numpy.logical_and(lonCell > 180.0, lonCell < 280.0), + numpy.logical_and(latCell > -60.0, latCell < -10.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + # Southern Ocean + mask = numpy.logical_and( + numpy.logical_and(lonCell > 0.0, lonCell < 165.0), + numpy.logical_and(latCell > -60.0, latCell < -45.0)) + openOceanMask = numpy.logical_or(openOceanMask, mask) + + mask = numpy.logical_and(floodFill == 0, openOceanMask) + floodFill[mask] = 1 + + 
nFloodableCells = numpy.count_nonzero(floodFill == 0) + print(" Initial number of flood cells: {}".format(nFloodableCells)) + + dsMask.landMaskDiagnostic[floodFill == 1] = 5 + + # sweep over neighbors of known open ocean points + for iSweep in range(dsMesh.sizes['nCells']): + + newFloodCellsThisSweep = 0 + mask = floodFill == 0 + cellIndices = numpy.nonzero(mask.values)[0] + for iCellOnCell in range(cellsOnCell.shape[1]): + neighbors = cellsOnCell[cellIndices, iCellOnCell] + filledNeighbors = numpy.logical_and(neighbors >= 0, + floodFill[neighbors] == 1) + fillIndices = cellIndices[filledNeighbors.values] + if(len(fillIndices) > 0): + floodFill[fillIndices] = 1 + newFloodCellsThisSweep += len(fillIndices) + + print(" Sweep {} new flood cells this sweep: {}".format( + iSweep, newFloodCellsThisSweep)) + + if (newFloodCellsThisSweep == 0): + break + + oceanMask = (floodFill == 1) + + print('oceanMask:', numpy.count_nonzero(oceanMask)) + + return oceanMask + + +def _revert_cells_with_connected_edges(dsMask, dsMesh, oceanMask, landMask, + removable, nSweeps): + print("Step 4: Searching for land-locked cells, step 3: revert cells with " + "connected active edges") + + cellsOnCell = dsMesh.cellsOnCell + nextCellsOnCell = dsMesh.nextCellsOnCell + prevCellsOnCell = dsMesh.prevCellsOnCell + + for iSweep in range(nSweeps): + landMaskNew = numpy.array(landMask) + + # only remove a cell that was added in Step 2, + # _remove_cells_with_isolated_edges2 + mask = numpy.logical_and(removable, dsMask.landMaskDiagnostic == 3) + + notLand = numpy.logical_not(landMask) + valid = numpy.logical_and(mask, cellsOnCell >= 0) + oceanEdge = numpy.logical_and(valid, oceanMask[cellsOnCell]) + valid = numpy.logical_and(mask, nextCellsOnCell >= 0) + activeNextEdge = numpy.logical_and(valid, notLand[nextCellsOnCell]) + valid = numpy.logical_and(mask, prevCellsOnCell >= 0) + activePrevEdge = numpy.logical_and(valid, notLand[prevCellsOnCell]) + + reactivate = numpy.any( + numpy.logical_and( + oceanEdge, + numpy.logical_or(activePrevEdge, activeNextEdge)), axis=1) + + landLockedCounter = numpy.count_nonzero(reactivate) + if landLockedCounter > 0: + landMaskNew[reactivate] = 0 + dsMask.regionCellMasks[reactivate, 0] = 0 + oceanMask[reactivate] = 1 + dsMask.landMaskDiagnostic[reactivate] = 4 + + landMask = landMaskNew + print(" Sweep: {} Number of land-locked cells returned: {}".format( + iSweep + 1, landLockedCounter)) + if landLockedCounter == 0: + break + + return landMask diff --git a/conda_package/mpas_tools/planar_hex.py b/conda_package/mpas_tools/planar_hex.py new file mode 100755 index 000000000..81f9fba77 --- /dev/null +++ b/conda_package/mpas_tools/planar_hex.py @@ -0,0 +1,465 @@ +#!/usr/bin/env python + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy +import xarray +import argparse + +from mpas_tools.io import write_netcdf + + +def make_planar_hex_mesh(nx, ny, dc, nonperiodic_x, + nonperiodic_y, outFileName=None, + compareWithFileName=None, + format='NETCDF3_64BIT'): + ''' + Builds an MPAS periodic, planar hexagonal mesh with the requested + dimensions, optionally saving it to a file, and returs it as an + ``xarray.Dataset``. + + Parameters + ---------- + nx : int + The number of cells in the x direction + + ny : even int + The number of cells in the y direction (must be an even number for + periodicity to work out) + + dc : float + The distance in meters between adjacent cell centers. 
+ + nonperiodic_x, nonperiodic_y : bool + is the mesh non-periodic in x and y directions? + + outFileName : str, optional + The name of a file to save the mesh to. The mesh is not saved to a + file if no file name is supplied. + + compareWithFileName : str, optional + The name of a grid file to compare with to see if they are identical, + used for testing purposes + + format : {'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_64BIT', 'NETCDF3_CLASSIC'}, optional + The NetCDF format to use for output + + Returns + ------- + mesh : ``xarray.Dataset`` + The mesh data set, available for further maniuplation such as culling + cells or removing periodicity. + ''' + + mesh = initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y) + compute_indices_on_cell(mesh) + if nonperiodic_x: + mark_cull_cell_nonperiodic_x(mesh) + if nonperiodic_y: + mark_cull_cell_nonperiodic_y(mesh) + compute_indices_on_edge(mesh) + compute_indices_on_vertex(mesh) + compute_weights_on_edge(mesh) + compute_coordinates(mesh) + add_one_to_indices(mesh) + + # drop some arrays that aren't stantard for MPAS but were used to compute + # the hex mesh + mesh = mesh.drop(['cellIdx', 'cellRow', 'cellCol']) + mesh.attrs.pop('dc') + + if outFileName is not None: + write_netcdf(mesh, outFileName, format=format) + + if compareWithFileName is not None: + # used to make sure results are exactly identical to periodic_hex + make_diff(mesh, compareWithFileName, 'diff.nc') + + return mesh + + +def initial_setup(nx, ny, dc, nonperiodic_x, nonperiodic_y): + '''Setup the dimensions and add placeholders for some index variables''' + if ny % 2 != 0: + raise ValueError('ny must be divisible by 2 for the grid\'s ' + 'periodicity to work properly.') + + mesh = xarray.Dataset() + + if nonperiodic_x and nonperiodic_y: + mesh.attrs['is_periodic'] = 'NO' + else: + mesh.attrs['is_periodic'] = 'YES' + + if nonperiodic_x: + mesh.attrs['x_period'] = 0. + else: + mesh.attrs['x_period'] = nx * dc + if nonperiodic_y: + mesh.attrs['y_period'] = 0. + else: + mesh.attrs['y_period'] = ny * dc * numpy.sqrt(3.) / 2. + + mesh.attrs['dc'] = dc + + mesh.attrs['on_a_sphere'] = 'NO' + mesh.attrs['sphere_radius'] = 0. 
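+
+    # For each non-periodic direction, add a layer of cells on either side of
+    # the domain; these extra cells are flagged in cullCell (see the
+    # mark_cull_cell_nonperiodic_* functions) so that they can be removed
+    # later, e.g. with mpas_tools.conversion.cull().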
+ + if nonperiodic_x: + nx = nx + 2 + if nonperiodic_y: + ny = ny + 2 + + nCells = nx * ny + nEdges = 3 * nCells + nVertices = 2 * nCells + vertexDegree = 3 + maxEdges = 6 + + # add some basic arrays to get all the dimensions in place + indexToCellID = numpy.arange(nCells, dtype='i4') + indexToEdgeID = numpy.arange(nEdges, dtype='i4') + indexToVertexID = numpy.arange(nVertices, dtype='i4') + + cellIdx = indexToCellID.reshape(ny, nx) + cellCol, cellRow = numpy.meshgrid(numpy.arange(nx, dtype='i4'), + numpy.arange(ny, dtype='i4')) + + mesh['cellIdx'] = (('ny', 'nx'), cellIdx) + mesh['cellRow'] = (('nCells'), cellRow.ravel()) + mesh['cellCol'] = (('nCells'), cellCol.ravel()) + + mesh['indexToCellID'] = (('nCells'), indexToCellID) + mesh['indexToEdgeID'] = (('nEdges'), indexToEdgeID) + mesh['indexToVertexID'] = (('nVertices'), indexToVertexID) + + mesh['cullCell'] = (('nCells'), numpy.zeros(nCells, 'i4')) + + mesh['nEdgesOnCell'] = (('nCells',), 6 * numpy.ones((nCells,), 'i4')) + mesh['cellsOnCell'] = (('nCells', 'maxEdges'), + numpy.zeros((nCells, maxEdges), 'i4')) + mesh['edgesOnCell'] = (('nCells', 'maxEdges'), + numpy.zeros((nCells, maxEdges), 'i4')) + mesh['verticesOnCell'] = (('nCells', 'maxEdges'), + numpy.zeros((nCells, maxEdges), 'i4')) + + mesh['nEdgesOnEdge'] = (('nEdges',), 10 * numpy.ones((nEdges,), 'i4')) + mesh['cellsOnEdge'] = (('nEdges', 'TWO'), + numpy.zeros((nEdges, 2), 'i4')) + mesh['edgesOnEdge'] = (('nEdges', 'maxEdges2'), + -1 * numpy.ones((nEdges, 2 * maxEdges), 'i4')) + mesh['verticesOnEdge'] = (('nEdges', 'TWO'), + numpy.zeros((nEdges, 2), 'i4')) + + mesh['cellsOnVertex'] = (('nVertices', 'vertexDegree'), + numpy.zeros((nVertices, vertexDegree), 'i4')) + mesh['edgesOnVertex'] = (('nVertices', 'vertexDegree'), + numpy.zeros((nVertices, vertexDegree), 'i4')) + + return mesh + + +def mark_cull_cell_nonperiodic_y(mesh): + + cullCell = mesh.cullCell + nCells = mesh.sizes['nCells'] + nx = mesh.sizes['nx'] + cullCell[0:nx] = 1 + cullCell[nCells - nx:nCells + 1] = 1 + + +def mark_cull_cell_nonperiodic_x(mesh): + + cullCell = mesh.cullCell + nCells = mesh.sizes['nCells'] + nx = mesh.sizes['nx'] + cullCell[::nx] = 1 + cullCell[nx - 1:nCells + 1:nx] = 1 + + +def compute_indices_on_cell(mesh): + + cellIdx = mesh.cellIdx + cellRow = mesh.cellRow + cellCol = mesh.cellCol + + indexToCellID = mesh.indexToCellID + + nx = mesh.sizes['nx'] + ny = mesh.sizes['ny'] + + mx = numpy.mod(cellCol - 1, nx) + my = numpy.mod(cellRow - 1, ny) + px = numpy.mod(cellCol + 1, nx) + py = numpy.mod(cellRow + 1, ny) + + mask = numpy.mod(cellRow, 2) == 0 + + cellsOnCell = mesh.cellsOnCell + cellsOnCell[:, 0] = cellIdx[cellRow, mx] + cellsOnCell[:, 1] = cellIdx[my, mx].where(mask, cellIdx[my, cellCol]) + cellsOnCell[:, 2] = cellIdx[my, cellCol].where(mask, cellIdx[my, px]) + cellsOnCell[:, 3] = cellIdx[cellRow, px] + cellsOnCell[:, 4] = cellIdx[py, cellCol].where(mask, cellIdx[py, px]) + cellsOnCell[:, 5] = cellIdx[py, mx].where(mask, cellIdx[py, cellCol]) + + edgesOnCell = mesh.edgesOnCell + edgesOnCell[:, 0] = 3 * indexToCellID + edgesOnCell[:, 1] = 3 * indexToCellID + 1 + edgesOnCell[:, 2] = 3 * indexToCellID + 2 + edgesOnCell[:, 3] = 3 * cellsOnCell[:, 3] + edgesOnCell[:, 4] = 3 * cellsOnCell[:, 4] + 1 + edgesOnCell[:, 5] = 3 * cellsOnCell[:, 5] + 2 + + verticesOnCell = mesh.verticesOnCell + verticesOnCell[:, 0] = 2 * indexToCellID + verticesOnCell[:, 1] = 2 * indexToCellID + 1 + verticesOnCell[:, 2] = 2 * cellsOnCell[:, 2] + verticesOnCell[:, 3] = 2 * cellsOnCell[:, 3] + 1 + verticesOnCell[:, 4] = 2 * 
cellsOnCell[:, 3] + verticesOnCell[:, 5] = 2 * cellsOnCell[:, 4] + 1 + + +def compute_indices_on_edge(mesh): + edgesOnCell = mesh.edgesOnCell + verticesOnCell = mesh.verticesOnCell + indexToCellID = mesh.indexToCellID + + cellsOnEdge = mesh.cellsOnEdge + for j in range(3): + cellsOnEdge[edgesOnCell[:, j], 1] = indexToCellID + for j in range(3, 6): + cellsOnEdge[edgesOnCell[:, j], 0] = indexToCellID + + verticesOnEdge = mesh.verticesOnEdge + verticesOnEdge[edgesOnCell[:, 0], 0] = verticesOnCell[:, 1] + verticesOnEdge[edgesOnCell[:, 0], 1] = verticesOnCell[:, 0] + verticesOnEdge[edgesOnCell[:, 1], 0] = verticesOnCell[:, 2] + verticesOnEdge[edgesOnCell[:, 1], 1] = verticesOnCell[:, 1] + verticesOnEdge[edgesOnCell[:, 2], 0] = verticesOnCell[:, 3] + verticesOnEdge[edgesOnCell[:, 2], 1] = verticesOnCell[:, 2] + + edgesOnEdge = mesh.edgesOnEdge + edgesOnEdge[edgesOnCell[:, 3], 0] = edgesOnCell[:, 4] + edgesOnEdge[edgesOnCell[:, 3], 1] = edgesOnCell[:, 5] + edgesOnEdge[edgesOnCell[:, 3], 2] = edgesOnCell[:, 0] + edgesOnEdge[edgesOnCell[:, 3], 3] = edgesOnCell[:, 1] + edgesOnEdge[edgesOnCell[:, 3], 4] = edgesOnCell[:, 2] + + edgesOnEdge[edgesOnCell[:, 4], 0] = edgesOnCell[:, 5] + edgesOnEdge[edgesOnCell[:, 4], 1] = edgesOnCell[:, 0] + edgesOnEdge[edgesOnCell[:, 4], 2] = edgesOnCell[:, 1] + edgesOnEdge[edgesOnCell[:, 4], 3] = edgesOnCell[:, 2] + edgesOnEdge[edgesOnCell[:, 4], 4] = edgesOnCell[:, 3] + + edgesOnEdge[edgesOnCell[:, 5], 0] = edgesOnCell[:, 0] + edgesOnEdge[edgesOnCell[:, 5], 1] = edgesOnCell[:, 1] + edgesOnEdge[edgesOnCell[:, 5], 2] = edgesOnCell[:, 2] + edgesOnEdge[edgesOnCell[:, 5], 3] = edgesOnCell[:, 3] + edgesOnEdge[edgesOnCell[:, 5], 4] = edgesOnCell[:, 4] + + edgesOnEdge[edgesOnCell[:, 0], 5] = edgesOnCell[:, 1] + edgesOnEdge[edgesOnCell[:, 0], 6] = edgesOnCell[:, 2] + edgesOnEdge[edgesOnCell[:, 0], 7] = edgesOnCell[:, 3] + edgesOnEdge[edgesOnCell[:, 0], 8] = edgesOnCell[:, 4] + edgesOnEdge[edgesOnCell[:, 0], 9] = edgesOnCell[:, 5] + + edgesOnEdge[edgesOnCell[:, 1], 5] = edgesOnCell[:, 2] + edgesOnEdge[edgesOnCell[:, 1], 6] = edgesOnCell[:, 3] + edgesOnEdge[edgesOnCell[:, 1], 7] = edgesOnCell[:, 4] + edgesOnEdge[edgesOnCell[:, 1], 8] = edgesOnCell[:, 5] + edgesOnEdge[edgesOnCell[:, 1], 9] = edgesOnCell[:, 0] + + edgesOnEdge[edgesOnCell[:, 2], 5] = edgesOnCell[:, 3] + edgesOnEdge[edgesOnCell[:, 2], 6] = edgesOnCell[:, 4] + edgesOnEdge[edgesOnCell[:, 2], 7] = edgesOnCell[:, 5] + edgesOnEdge[edgesOnCell[:, 2], 8] = edgesOnCell[:, 0] + edgesOnEdge[edgesOnCell[:, 2], 9] = edgesOnCell[:, 1] + + +def compute_indices_on_vertex(mesh): + edgesOnCell = mesh.edgesOnCell + verticesOnCell = mesh.verticesOnCell + indexToCellID = mesh.indexToCellID + + cellsOnVertex = mesh.cellsOnVertex + cellsOnVertex[verticesOnCell[:, 1], 2] = indexToCellID + cellsOnVertex[verticesOnCell[:, 3], 0] = indexToCellID + cellsOnVertex[verticesOnCell[:, 5], 1] = indexToCellID + cellsOnVertex[verticesOnCell[:, 0], 0] = indexToCellID + cellsOnVertex[verticesOnCell[:, 2], 1] = indexToCellID + cellsOnVertex[verticesOnCell[:, 4], 2] = indexToCellID + + edgesOnVertex = mesh.edgesOnVertex + edgesOnVertex[verticesOnCell[:, 0], 0] = edgesOnCell[:, 0] + edgesOnVertex[verticesOnCell[:, 1], 0] = edgesOnCell[:, 0] + edgesOnVertex[verticesOnCell[:, 2], 2] = edgesOnCell[:, 1] + edgesOnVertex[verticesOnCell[:, 1], 2] = edgesOnCell[:, 1] + edgesOnVertex[verticesOnCell[:, 2], 1] = edgesOnCell[:, 2] + edgesOnVertex[verticesOnCell[:, 3], 1] = edgesOnCell[:, 2] + + +def compute_weights_on_edge(mesh): + edgesOnCell = mesh.edgesOnCell + + 
nEdges = mesh.sizes['nEdges'] + maxEdges2 = mesh.sizes['maxEdges2'] + mesh['weightsOnEdge'] = (('nEdges', 'maxEdges2'), + numpy.zeros((nEdges, maxEdges2), 'f8')) + weightsOnEdge = mesh.weightsOnEdge + + weights = (1. / numpy.sqrt(3.)) * numpy.array( + [[1. / 3., 1. / 6., 0., 1. / 6., 1. / 3.], + [1. / 3., -1. / 6., 0., 1. / 6., -1. / 3.], + [-1. / 3., -1. / 6., 0., -1. / 6., -1. / 3.]]) + for i in range(3): + for j in range(5): + weightsOnEdge[edgesOnCell[:, i + 3], j] = weights[i, j] + for i in range(3): + for j in range(5): + weightsOnEdge[edgesOnCell[:, i], j + 5] = weights[i, j] + + +def compute_coordinates(mesh): + + dc = mesh.attrs['dc'] + edgesOnCell = mesh.edgesOnCell + verticesOnCell = mesh.verticesOnCell + + nCells = mesh.sizes['nCells'] + nEdges = mesh.sizes['nEdges'] + nVertices = mesh.sizes['nVertices'] + vertexDegree = mesh.sizes['vertexDegree'] + + mesh['latCell'] = (('nCells'), numpy.zeros((nCells,), 'f8')) + mesh['lonCell'] = (('nCells'), numpy.zeros((nCells,), 'f8')) + + mesh['latEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + mesh['lonEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + + mesh['latVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + mesh['lonVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + + cellRow = mesh.cellRow + cellCol = mesh.cellCol + mask = numpy.mod(cellRow, 2) == 0 + + mesh['xCell'] = (dc * (cellCol + 0.5)).where(mask, dc * (cellCol + 1)) + mesh['yCell'] = dc * (cellRow + 1) * numpy.sqrt(3.) / 2. + mesh['zCell'] = (('nCells'), numpy.zeros((nCells,), 'f8')) + + mesh['xEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + mesh['yEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + mesh['zEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + + mesh.xEdge[edgesOnCell[:, 0]] = mesh.xCell - 0.5 * dc + mesh.yEdge[edgesOnCell[:, 0]] = mesh.yCell + + mesh.xEdge[edgesOnCell[:, 1]] = mesh.xCell - \ + 0.5 * dc * numpy.cos(numpy.pi / 3.) + mesh.yEdge[edgesOnCell[:, 1]] = mesh.yCell - \ + 0.5 * dc * numpy.sin(numpy.pi / 3.) + + mesh.xEdge[edgesOnCell[:, 2]] = mesh.xCell + \ + 0.5 * dc * numpy.cos(numpy.pi / 3.) + mesh.yEdge[edgesOnCell[:, 2]] = mesh.yCell - \ + 0.5 * dc * numpy.sin(numpy.pi / 3.) + + mesh['xVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + mesh['yVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + mesh['zVertex'] = (('nVertices'), numpy.zeros((nVertices,), 'f8')) + + mesh.xVertex[verticesOnCell[:, 0]] = mesh.xCell - 0.5 * dc + mesh.yVertex[verticesOnCell[:, 0]] = mesh.yCell + dc * numpy.sqrt(3.) / 6. + + mesh.xVertex[verticesOnCell[:, 1]] = mesh.xCell - 0.5 * dc + mesh.yVertex[verticesOnCell[:, 1]] = mesh.yCell - dc * numpy.sqrt(3.) / 6. + + mesh['angleEdge'] = (('nEdges'), numpy.zeros((nEdges,), 'f8')) + mesh.angleEdge[edgesOnCell[:, 1]] = numpy.pi / 3. + mesh.angleEdge[edgesOnCell[:, 2]] = 2. * numpy.pi / 3. + + mesh['dcEdge'] = (('nEdges'), dc * numpy.ones((nEdges,), 'f8')) + mesh['dvEdge'] = mesh.dcEdge * numpy.sqrt(3.) / 3. + + mesh['areaCell'] = \ + (('nCells'), dc**2 * numpy.sqrt(3.) / 2. * numpy.ones((nCells,), 'f8')) + + mesh['areaTriangle'] = \ + (('nVertices'), dc**2 * numpy.sqrt(3.) / + 4. * numpy.ones((nVertices,), 'f8')) + + mesh['kiteAreasOnVertex'] = \ + (('nVertices', 'vertexDegree'), + dc**2 * numpy.sqrt(3.) / 12. 
* numpy.ones((nVertices, vertexDegree), + 'f8')) + + mesh['meshDensity'] = (('nCells',), numpy.ones((nCells,), 'f8')) + + +def add_one_to_indices(mesh): + '''Neede to adhere to Fortran indexing''' + indexVars = ['indexToCellID', 'indexToEdgeID', 'indexToVertexID', + 'cellsOnCell', 'edgesOnCell', 'verticesOnCell', + 'cellsOnEdge', 'edgesOnEdge', 'verticesOnEdge', + 'cellsOnVertex', 'edgesOnVertex'] + for var in indexVars: + mesh[var] = mesh[var] + 1 + + +def make_diff(mesh, refMeshFileName, diffFileName): + + refMesh = xarray.open_dataset(refMeshFileName) + diff = xarray.Dataset() + for variable in mesh.data_vars: + if variable in refMesh: + diff[variable] = mesh[variable] - refMesh[variable] + print(diff[variable].name, float(numpy.abs(diff[variable]).max())) + else: + print('mesh has extra variable {}'.format(mesh[variable].name)) + + for variable in refMesh.data_vars: + if variable not in mesh: + print('mesh mising variable {}'.format(refMesh[variable].name)) + + for attr in refMesh.attrs: + if attr not in mesh.attrs: + print('mesh mising attribute {}'.format(attr)) + + for attr in mesh.attrs: + if attr not in refMesh.attrs: + print('mesh has extra attribute {}'.format(attr)) + + write_netcdf(diff, diffFileName) + + +def main(): + + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('--nx', dest='nx', type=int, required=True, + help='Cells in x direction') + parser.add_argument('--ny', dest='ny', type=int, required=True, + help='Cells in y direction') + parser.add_argument('--dc', dest='dc', type=float, required=True, + help='Distance between cell centers in meters') + parser.add_argument('--npx', '--nonperiodic_x', dest='nonperiodic_x', + action="store_true", + help='non-periodic in x direction') + parser.add_argument('--npy', '--nonperiodic_y', dest='nonperiodic_y', + action="store_true", + help='non-periodic in y direction') + parser.add_argument('-o', '--outFileName', dest='outFileName', type=str, + required=False, default='grid.nc', + help='The name of the output file') + + args = parser.parse_args() + + make_planar_hex_mesh(args.nx, args.ny, args.dc, + args.nonperiodic_x, args.nonperiodic_y, + args.outFileName) + + +if __name__ == '__main__': + main() diff --git a/conda_package/mpas_tools/split_grids.py b/conda_package/mpas_tools/split_grids.py new file mode 100755 index 000000000..9ce5c5109 --- /dev/null +++ b/conda_package/mpas_tools/split_grids.py @@ -0,0 +1,290 @@ +#!/usr/bin/env python +""" +Tool to split 2 previously merged MPAS non-contiguous meshes into separate files. +Typical usage is: + split_grids.py -1 outfile1.nc -2 outfile2.nc infile +The optional arguments for nCells, nEdges, nVertices, and maxEdges should +generally not be required as this information is saved in the combined mesh file +as global attributes by the merge_grids.py script. 
+"""
+
+import os
+import sys
+import json
+import argparse
+
+from datetime import datetime
+
+from netCDF4 import Dataset
+
+
+def parse_args(args=None):
+    parser = argparse.ArgumentParser(description=__doc__,
+                                     formatter_class=argparse.RawTextHelpFormatter)
+
+    parser.add_argument('infile', metavar='MESHFILE',
+                        help='Mesh file to split')
+
+    parser.add_argument('-1', '--outfile1', default='mesh1.nc', metavar='FILENAME',
+                        help='File name for first mesh output \n(default: %(default)s)')
+
+    parser.add_argument('-2', '--outfile2', default='mesh2.nc', metavar='FILENAME',
+                        help='File name for second mesh output \n(default: %(default)s)')
+
+    parser.add_argument('--nCells', type=int,
+                        help='The number of cells in the first mesh \n'
+                             '(default: the value specified in MESHFILE global '
+                             'attribute merge_point)')
+
+    parser.add_argument('--nEdges', type=int,
+                        help='The number of edges in the first mesh \n'
+                             '(default: the value specified in MESHFILE global '
+                             'attribute merge_point)')
+
+    parser.add_argument('--nVertices', type=int,
+                        help='The number of vertices in the first mesh \n'
+                             '(default: the value specified in MESHFILE global '
+                             'attribute merge_point)')
+
+    parser.add_argument('--maxEdges', type=int, nargs=2,
+                        metavar=('MAXEDGES1', 'MAXEDGES2'),
+                        help='The number of maxEdges in each mesh \n'
+                             '(default: the value specified in MESHFILE global '
+                             'attribute merge_point\n OR: will use MESHFILE '
+                             'maxEdges dimension and assume same for both)')
+
+    return parser.parse_args(args)
+
+
+def split_grids(infile=None, outfile1=None, outfile2=None,
+                nCells=None, nEdges=None, nVertices=None, maxEdges=None,
+                runner=None):
+    """
+    Split two previously merged, non-contiguous MPAS meshes into separate
+    files. Typical usage is:
+
+    .. code:: python
+
+        split_grids(infile='infile.nc', outfile1='outfile1.nc',
+                    outfile2='outfile2.nc')
+
+    The optional arguments for ``nCells``, ``nEdges``, ``nVertices``, and
+    ``maxEdges`` should generally not be required, as this information should
+    have been saved in ``infile``'s global attribute ``merge_point`` when it
+    was created by :func:`mpas_tools.merge_grids.merge_grids`.
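+
+    If ``merge_point`` is not available in ``infile``, the split location can
+    instead be given explicitly (the sizes below are placeholders for
+    illustration):
+
+    .. code:: python
+
+        split_grids(infile='infile.nc', outfile1='outfile1.nc',
+                    outfile2='outfile2.nc', nCells=5000, nEdges=15500,
+                    nVertices=10500)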
+ + Parameters + ---------- + infile : str + The file name for the mesh to split + + outfile1 : str + The file name for the first split mesh + + outfile2 : str + The file name for the second split mesh + + nCells : int, optional + The number of cells in the first mesh (default: the value specified in + infile global attribute merge_point) + + nEdges : int, optional + The number of edges in the first mesh (default: the value specified in + infile global attribute merge_point + + nVertices : int, optional + The number of vertices in the first mesh (default: the value specified in + infile global attribute merge_point + + maxEdges : list[int, int], optional + A list of the number of max edges (int) in each mesh (default: the value + specified in infile global attribute merge_point OR will use infile + maxEdges dimension and assume same for both) + + runner : str, optional + The command to write into the global history attribute of the outfile + """ + now = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + if not runner: + runner = '{}.split_grids(infile={}, outfile1={}, outfile2={}, nCells={},' \ + 'nEdges={}, nVertices={})'.format(os.path.splitext(__file__)[0], + infile, outfile1, outfile2, + nCells, nEdges, nVertices) + + merge_point_args_missing = (nCells is None, + nEdges is None, + nVertices is None) + + print('Opening {} to split'.format(infile)) + with Dataset(infile) as nc_in: + # NOTE: Because nCells, nEdges, and nVertices are optional arguments and + # the previous merge point can be specified in the mesh file, we + # need to do some complicated error handling. + merge_point_in_file = 'merge_point' in nc_in.ncattrs() + if not merge_point_in_file and any(merge_point_args_missing): + raise ValueError('ERROR: Previous merge point under specified!\n' + ' nCells, nEdges, and nVertices options must all ' + 'be given, or merge_point global attribute must exist ' + 'in {}'.format(infile)) + elif merge_point_in_file and not any(merge_point_args_missing): + print('Warning: command line arguments are overriding previous merge ' + 'point as specified in {} merge_point global' + ' attribute'.format(infile)) + elif merge_point_in_file: + if not all(merge_point_args_missing): + print('Warning: nCells, nEdges, and nVertices options must all ' + 'be given to override speification in {} merge_point global ' + 'attribute'.format(infile)) + try: + mp = json.loads(nc_in.merge_point) + except ValueError: + raise ValueError('ERROR: {} merge_point global attribute is not valid JSON.\n' + ' merge_point: {}'.format(infile, nc_in.merge_point)) + + mp_keyset = set(mp) + if {'nCells', 'nEdges', 'nVertices'} <= mp_keyset: + nCells = mp['nCells'] + nEdges = mp['nEdges'] + nVertices = mp['nVertices'] + else: + raise ValueError('ERROR: merge_point global attribute of {} must ' + 'contain nCells, nEdges, and nVertices.\n' + ' merge_point: {}'.format(infile, mp)) + if {'maxEdges1', 'maxEdges2'} <= mp_keyset: + maxEdges = [mp['maxEdges1'], mp['maxEdges2']] + + print('Creating the mesh files:\n {}\n {}'.format( + outfile1, outfile2)) + with Dataset(outfile1, 'w', format="NETCDF3_CLASSIC") as mesh1, \ + Dataset(outfile2, 'w', format="NETCDF3_CLASSIC") as mesh2: + mesh1.createDimension('nCells', nCells) + mesh1.createDimension('nEdges', nEdges) + mesh1.createDimension('nVertices', nVertices) + mesh1.createDimension('TWO', 2) + mesh1.createDimension('vertexDegree', + nc_in.dimensions['vertexDegree'].size) + + mesh2.createDimension('nCells', nc_in.dimensions['nCells'].size - nCells) + mesh2.createDimension('nEdges', 
nc_in.dimensions['nEdges'].size - nEdges) + mesh2.createDimension('nVertices', nc_in.dimensions['nVertices'].size - nVertices) + mesh2.createDimension('TWO', 2) + mesh2.createDimension('vertexDegree', + nc_in.dimensions['vertexDegree'].size) + + if 'StrLen' in nc_in.dimensions: + mesh1.createDimension('StrLen', nc_in.dimensions['StrLen'].size) + mesh2.createDimension('StrLen', nc_in.dimensions['StrLen'].size) + + if maxEdges is None: + maxEdges = [nc_in.dimensions['maxEdges'].size, + nc_in.dimensions['maxEdges'].size] + + mesh1.createDimension('maxEdges', maxEdges[0]) + mesh1.createDimension('maxEdges2', maxEdges[0] * 2) + + mesh2.createDimension('maxEdges', maxEdges[1]) + mesh2.createDimension('maxEdges2', maxEdges[1] * 2) + + mesh1.createDimension('nVertLevels', nc_in.dimensions['nVertLevels'].size) + mesh1.createDimension('nVertInterfaces', nc_in.dimensions['nVertInterfaces'].size) + mesh1.createDimension('Time', size=None) # make unlimited + + mesh2.createDimension('nVertLevels', nc_in.dimensions['nVertLevels'].size) + mesh2.createDimension('nVertInterfaces', nc_in.dimensions['nVertInterfaces'].size) + mesh2.createDimension('Time', size=None) # make unlimited + + print('Splitting variable:') + for var in nc_in.variables: + print(' {}'.format(var)) + var_in = nc_in.variables[var] + + var1 = mesh1.createVariable(var, var_in.dtype, var_in.dimensions) + var2 = mesh2.createVariable(var, var_in.dtype, var_in.dimensions) + + slice1, slice2 = var_slice(var_in.dimensions, nc_in, + nCells, nEdges, nVertices, maxEdges) + + var1[:] = nc_in.variables[var][slice1] + var2[:] = nc_in.variables[var][slice2] + + # Adjust the indexes + if var == 'indexToCellID': + var2[:] -= nCells + elif var == 'indexToEdgeID': + var2[:] -= nEdges + elif var == 'indexToVertexID': + var2[:] -= nVertices + elif var in ['cellsOnCell', 'cellsOnEdge', 'cellsOnVertex']: + tmp = var2[...] + tmp[tmp > 0] -= nCells + var2[:] = tmp + elif var in ['edgesOnCell', 'edgesOnEdge', 'edgesOnVertex']: + tmp = var2[...] + tmp[tmp > 0] -= nEdges + var2[:] = tmp + elif var in ['verticesOnCell', 'verticesOnEdge']: + tmp = var2[...] + tmp[tmp > 0] -= nVertices + var2[:] = tmp + + attr_to_copy = ("on_a_sphere", "sphere_radius", "is_periodic") + for attr in attr_to_copy: + if attr in nc_in.ncattrs(): + mesh1.setncattr(attr, nc_in.getncattr(attr)) + mesh2.setncattr(attr, nc_in.getncattr(attr)) + else: + print("Warning: '{0}' global attribute not present in input " + "file. '{0}' will not be added to the two output " + "files.".format(attr)) + + run_command = '{}: {} \n'.format(now, runner) + if 'history' in nc_in.ncattrs(): + mesh1.history = maybe_encode(run_command + nc_in.history) + mesh2.history = maybe_encode(run_command + nc_in.history) + else: + mesh1.history = maybe_encode(run_command) + mesh2.history = maybe_encode(run_command) + + print('Split complete! 
Mesh files:\n {}\n {}'.format(outfile1, outfile2)) + + +def var_slice(dimensions, nc_in, nCells, nEdges, nVertices, maxEdges): + slice1 = () + slice2 = () + for dim in dimensions: + if dim == 'nCells': + slice1 += (slice(0, nCells),) + slice2 += (slice(nCells, nc_in.dimensions['nCells'].size),) + elif dim == 'nEdges': + slice1 += (slice(0, nEdges),) + slice2 += (slice(nEdges, nc_in.dimensions['nEdges'].size),) + elif dim == 'nVertices': + slice1 += (slice(0, nVertices),) + slice2 += (slice(nVertices, nc_in.dimensions['nVertices'].size),) + elif dim == 'maxEdges': + slice1 += (slice(0, maxEdges[0]),) + slice2 += (slice(0, maxEdges[1]),) + elif dim == 'maxEdges2': + slice1 += (slice(0, maxEdges[0]*2),) + slice2 += (slice(0, maxEdges[1]*2),) + else: + slice1 += (slice(None),) + slice2 += (slice(None),) + + return slice1, slice2 + + +# NOTE: Python 2 and 3 string fun conflicting with NC_CHAR vs NC_STRING, see: +# https://github.com/Unidata/netcdf4-python/issues/529 +def maybe_encode(string, encoding='ascii'): + try: + return string.encode(encoding) + except UnicodeEncodeError: + return string + + +def main(): + arguments = parse_args() + arguments.runner = ' '.join(sys.argv[:]) + split_grids(**vars(arguments)) + + +if __name__ == '__main__': + main() diff --git a/conda_package/mpas_tools/tests/__init__.py b/conda_package/mpas_tools/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/conda_package/mpas_tools/tests/test_conversion.py b/conda_package/mpas_tools/tests/test_conversion.py new file mode 100755 index 000000000..4cce4d55a --- /dev/null +++ b/conda_package/mpas_tools/tests/test_conversion.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python + +from mpas_tools.conversion import convert, cull, mask +from mpas_tools.io import write_netcdf +import matplotlib +matplotlib.use('Agg') +from geometric_features import read_feature_collection +import xarray + + +def test_conversion(): + dsMesh = xarray.open_dataset( + 'mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc') + dsMesh = convert(dsIn=dsMesh) + write_netcdf(dsMesh, 'mesh.nc') + + dsMask = xarray.open_dataset( + 'mesh_tools/mesh_conversion_tools/test/land_mask_final.nc') + dsCulled = cull(dsIn=dsMesh, dsMask=dsMask) + write_netcdf(dsCulled, 'culled_mesh.nc') + + dsMask = xarray.open_dataset( + 'mesh_tools/mesh_conversion_tools/test/land_mask_final.nc') + + fcMask = read_feature_collection( + 'mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson') + dsMask = mask(dsMesh=dsMesh, fcMask=fcMask) + write_netcdf(dsMask, 'antarctic_mask.nc') + + +if __name__ == '__main__': + test_conversion() diff --git a/conda_package/mpas_tools/translate.py b/conda_package/mpas_tools/translate.py new file mode 100755 index 000000000..c8bd93b0e --- /dev/null +++ b/conda_package/mpas_tools/translate.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +from optparse import OptionParser + +import xarray + +from mpas_tools.io import write_netcdf + + +def translate(mesh, xOffset=0., yOffset=0.): + ''' + Translates the coordinate system of the planar MPAS mesh by an arbirary + shift in x and/or y + + Parameters + ---------- + mesh : ``xarray.Dataset`` + A planar mesh to translate + + xOffset : float, optional + user-specified shift in the x-direction + + yOffset : float, optional + user-specified shift in the y-direction + + ''' + + mesh.xCell[:] += xOffset + mesh.yCell[:] += yOffset + mesh.xVertex[:] += xOffset + mesh.yVertex[:] += yOffset + 
mesh.xEdge[:] += xOffset + mesh.yEdge[:] += yOffset + + +def center_on_mesh(mesh, otherMesh): + ''' + Translates the coordinate system of the planar MPAS mesh by shifting the + origin to the center of the domain described in a separate mesh + + Parameters + ---------- + mesh : ``xarray.Dataset`` + A planar mesh to translate + + otherMesh : ``xarray.Dataset`` + Another planar mesh whose center will become the center of this mesh. + Uses xCell,yCell or, if those fields do not exist, will secondly try + x1,y1 fields + ''' + + mpasXcenter, mpasYcenter = get_center(mesh) + + if 'xCell' in otherMesh and 'yCell' in otherMesh: + dataXcenter, dataYcenter = get_center(otherMesh, xVar='xCell', + yVar='yCell') + elif 'x1' in otherMesh and 'y1' in otherMesh: + dataXcenter, dataYcenter = get_center(otherMesh, xVar='x1', yVar='y1') + else: + raise ValueError('reference mesh has neither xCell/yCell nor x1/y1 ' + 'fields.') + + translate(mesh, dataXcenter-mpasXcenter, dataYcenter-mpasYcenter) + + +def center(mesh): + ''' + Translates the coordinate system of the planar MPAS mesh by shifting the + origin to the center of the domain + + Parameters + ---------- + mesh : ``xarray.Dataset`` + A planar mesh to translate + ''' + mpasXcenter, mpasYcenter = get_center(mesh) + + translate(mesh, -mpasXcenter, -mpasYcenter) + + +def get_center(mesh, xVar='xCell', yVar='yCell'): + ''' + Find the center of the mesh + ''' + + xCenter = (mesh[xVar].min() + mesh[xVar].max()) * 0.5 + yCenter = (mesh[yVar].min() + mesh[yVar].max()) * 0.5 + + return xCenter, yCenter + + +def main(): + + print("== Gathering information. (Invoke with --help for more details. " + "All arguments are optional)") + parser = OptionParser() + parser.description = \ + "This script translates the coordinate system of the planar MPAS " \ + "mesh specified with the -f flag. \n" \ + "There are 3 possible methods to choose from:\n" \ + "1) shift the origin to the center of the domain\n" \ + "2) arbirary shift in x and/or y\n" \ + "3) shift to the center of the domain described in a separate file\n" + parser.add_option("-f", "--file", dest="fileInName", + help="MPAS planar grid file name.", default="grid.nc", + metavar="FILENAME") + parser.add_option("-d", "--datafile", dest="dataFileName", + help="data file name to which to match the domain " + "center of. Uses xCell,yCell or, if those fields " + "do not exist, will secondly try x1,y1 fields.", + metavar="FILENAME") + parser.add_option("-x", dest="xshift", + help="user-specified shift in the x-direction.", + type="float", default=0.0, metavar="SHIFT_VALUE") + parser.add_option("-y", dest="yshift", + help="user-specified shift in the y-direction.", + type="float", default=0.0, metavar="SHIFT_VALUE") + parser.add_option("-c", dest="center", + help="shift so origin is at center of domain", + action="store_true", default=False) + for option in parser.option_list: + if option.default != ("NO", "DEFAULT"): + option.help += (" " if option.help else "") + "[default: %default]" + options, args = parser.parse_args() + + print("Attempting to translate coordinates in file: {}".format( + options.fileInName)) + + if options.dataFileName is not None and \ + (options.xshift != 0. or options.yshift != 0.): + raise ValueError('Specifying a datafile AND one or both of x/y shift ' + 'is invalid. Please select one of those methods ' + 'only.') + + if options.center and (options.xshift != 0. or options.yshift != 0.): + raise ValueError('Specifying a shift to center AND one or both of x/y ' + 'shift is invalid. 
Please select one of those ' + 'methods only.') + + if options.dataFileName is not None and options.center: + raise ValueError('Specifying a datafile AND a shift to center is ' + 'invalid. Please select one of those methods only.') + + if not options.center and (options.xshift == 0.) and \ + (options.yshift == 0.) and options.dataFileName is None: + raise ValueError('No translation method was specified. Please select ' + 'one. Run with -h for more information.') + + mesh = xarray.open_dataset(options.fileInName) + if options.dataFileName is not None: + print(" Translating coordinates in {} so the domain center matches " + "the domain center in {}.\n\n".format(options.fileInName, + options.dataFileName)) + otherMesh = xarray.open_dataset(options.dataFileName) + center_on_mesh(mesh, otherMesh) + + if options.xshift != 0. or options.yshift != 0.: + print(" Translating coordinates in {} by user-specified values. " + "X-shift={:f}; Y-shift={:f}\n\n".format(options.fileInName, + options.xshift, + options.yshift)) + + translate(mesh, options.xshift, options.yshift) + + if options.center: + print(" Translating coordinates in %s so the origin is the center of " + "the domain.\n\n") + + center(mesh) + + # close the file so we can re-open it for writing + mesh.close() + write_netcdf(mesh, options.fileInName) + + print("Translation completed.") + + +if __name__ == '__main__': + main() diff --git a/python_scripts/paraview_vtk_field_extractor/utils.py b/conda_package/mpas_tools/viz.py similarity index 51% rename from python_scripts/paraview_vtk_field_extractor/utils.py rename to conda_package/mpas_tools/viz.py index fde2a33c0..bda7adbc5 100644 --- a/python_scripts/paraview_vtk_field_extractor/utils.py +++ b/conda_package/mpas_tools/viz.py @@ -1,39 +1,60 @@ #!/usr/bin/env python """ -Name: utils.py +Name: viz.py Authors: Xylar Asay-Davis Utility library for various scripts used to extract vtk geometry from NetCDF files on MPAS grids. """ -from pyevtk.vtk import VtkFile, VtkPolyData +from __future__ import absolute_import, division, print_function, \ + unicode_literals -import sys, glob +try: + from evtk.vtk import VtkFile, VtkPolyData +except ImportError: + from pyevtk.vtk import VtkFile, VtkPolyData + +import sys +import glob import numpy +from builtins import input + from netCDF4 import Dataset as NetCDFFile try: from progressbar import ProgressBar, Percentage, Bar, ETA use_progress_bar = True -except: +except ImportError: use_progress_bar = False -def is_valid_mesh_var(mesh_file, variable_name): + +def open_netcdf(file_name): + nc_file = NetCDFFile(file_name, 'r') + # turn off auto mask (if applicable) + try: + nc_file.set_auto_mask(False) + except AttributeError: + pass + return nc_file + + +def is_valid_mesh_var(mesh_file, variable_name): # {{{ if mesh_file is None: return False if variable_name not in mesh_file.variables: return False - return 'Time' not in mesh_file.variables[variable_name].dimensions + return 'Time' not in mesh_file.variables[variable_name].dimensions # }}} -def get_var(variable_name, mesh_file, time_series_file): + +def get_var(variable_name, mesh_file, time_series_file): # {{{ if is_valid_mesh_var(mesh_file, variable_name): return mesh_file.variables[variable_name] else: - return time_series_file.variables[variable_name] + return time_series_file.variables[variable_name] # }}} def setup_time_indices(fn_pattern, xtimeName): # {{{ @@ -56,8 +77,8 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ all_times = [] if len(file_list) == 0: - print "No files to process." 
- print "Exiting..." + print("No files to process.") + print("Exiting...") sys.exit(0) if use_progress_bar: @@ -65,29 +86,40 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ ETA()] time_bar = ProgressBar(widgets=widgets, maxval=len(file_list)).start() else: - print "Build time indices..." + print("Build time indices...") i_file = 0 + allTIndex = 0 for file_name in file_list: try: - nc_file = NetCDFFile(file_name, 'r') + nc_file = open_netcdf(file_name) except IOError: - print "Warning: could not open {}".format(file_name) + print("Warning: could not open {}".format(file_name)) continue if 'Time' not in nc_file.dimensions or xtimeName is None: local_times = ['0'] else: - if xtimeName not in nc_file.variables: - raise ValueError("xtime variable name {} not found in " - "{}".format(xtimeName, file_name)) local_times = [] - xtime = nc_file.variables[xtimeName][:, :] - for index in range(xtime.shape[0]): - local_times.append(''.join(xtime[index, :])) + if xtimeName == 'none': + # no xtime variable so just use integers converted to strings + for index in range(len(nc_file.dimensions['Time'])): + local_times.append(allTIndex) + allTIndex += 1 + else: + if xtimeName not in nc_file.variables: + raise ValueError("xtime variable name {} not found in " + "{}".format(xtimeName, file_name)) + xtime = nc_file.variables[xtimeName] + if len(xtime.shape) == 2: + xtime = xtime[:, :] + for index in range(xtime.shape[0]): + local_times.append(xtime[index, :].tostring()) + else: + local_times = xtime[:] - if(len(local_times) == 0): - local_times = ['0'] + if(len(local_times) == 0): + local_times = ['0'] nTime = len(local_times) @@ -108,14 +140,8 @@ def setup_time_indices(fn_pattern, xtimeName): # {{{ return (local_indices, file_names) # }}} -# Parses the indices to be extracted along a given dimension. -# The index_string can be fomatted as follows: -# -- no indices are to be extracted -# n -- the index n is to be extracted -# m,n,p -- the list of indices is to be extracted -# m:n -- all indices from m to n are to be extracted (including m but -# excluding n, in the typical python indexing convention) -def parse_extra_dim(dim_name, index_string, time_series_file, mesh_file):#{{{ +def parse_extra_dim(dim_name, index_string, time_series_file, mesh_file): + # {{{ """ Parses the indices to be extracted along a given dimension. The index_string can be fomatted as follows: @@ -165,18 +191,18 @@ def parse_extra_dim(dim_name, index_string, time_series_file, mesh_file):#{{{ # zero-pad integer indices if len(numerical_indices) > 0: max_index = numpy.amax(numerical_indices) - pad = int(numpy.log10(max(max_index,1)))+1 - template = '%%0%dd'%pad + pad = int(numpy.log10(max(max_index, 1)))+1 + template = '%%0%dd' % pad for i in range(len(indices)): try: val = int(indices[i]) except ValueError: continue - indices[i] = template%(val) + indices[i] = template % (val) return indices -#}}} +# }}} def parse_time_indices(index_string, time_indices, time_file_names): # {{{ @@ -288,45 +314,67 @@ def parse_index_string(index_string, dim_size): # {{{ return indices, numerical_indices # }}} -# Parses a list of dimensions and corresponding indices separated by equals signs. -# Optionally, a max_index_count (typically 1) can be provided, indicating that -# indices beyond max_index_count-1 will be ignored in each dimension. -# Optionally, topo_dim contains the name of a dimension associated with the -# surface or bottom topography (e.g. 
nVertLevels for MPAS-Ocean) -# If too_dim is provided, topo_cell_indices_name can optionally be either -# a constant value for the index vertical index to the topography or -# the name of a field with dimension nCells that contains the vertical index of -# the topography. def parse_extra_dims(dimension_list, time_series_file, mesh_file, - max_index_count=None):#{{{ - if not dimension_list: - return {} + topo_dim=None, topo_cell_index_name=None, + max_index_count=None): + # {{{ + ''' + Parses a list of dimensions and corresponding indices separated by equals + signs. Optionally, a max_index_count (typically 1) can be provided, + indicating that indices beyond max_index_count-1 will be ignored in each + dimension. Optionally, topo_dim contains the name of a dimension associated + with the surface or bottom topography (e.g. nVertLevels for MPAS-Ocean) + If topo_dim is provided, topo_cell_index_name can optionally be either + a constant value for the vertical index to the topography or the name of a + field with dimension nCells that contains the vertical index of the + topography. + ''' extra_dims = {} - for dim_item in dimension_list: - (dimName,index_string) = dim_item.split('=') - indices = parse_extra_dim(dimName, index_string, time_series_file, mesh_file) - if indices is not None: - if max_index_count is None or len(indices) <= max_index_count: - extra_dims[dimName] = indices + topo_cell_indices = None + + if dimension_list is not None: + for dim_item in dimension_list: + (dimName, index_string) = dim_item.split('=') + indices = parse_extra_dim(dimName, index_string, time_series_file, + mesh_file) + if indices is not None: + if max_index_count is None or len(indices) <= max_index_count: + extra_dims[dimName] = indices + else: + extra_dims[dimName] = indices[0:max_index_count] + + if topo_dim is not None: + if topo_cell_index_name is not None: + if (mesh_file is not None) and \ + (topo_cell_index_name in mesh_file.variables): + topo_cell_indices = \ + mesh_file.variables[topo_cell_index_name][:]-1 else: - extra_dims[dimName] = indices[0:max_index_count] - - return extra_dims -#}}} + topo_cell_indices = \ + time_series_file.variables[topo_cell_index_name][:]-1 + else: + index = len(mesh_file.dimensions[topo_dim])-1 + nCells = len(mesh_file.dimensions['nCells']) + topo_cell_indices = index*numpy.ones(nCells, int) + return extra_dims, topo_cell_indices +# }}} -# Creates a list of variables names to be extracted. Prompts for indices -# of any extra dimensions that were not specified on the command line. -# extra_dims should be a dictionary of indices along extra dimensions (as -# opposed to "basic" dimensions). basic_dims is a list of dimension names -# that should be excluded from extra_dims. include_dims is a list of -# possible dimensions, one of which must be in each vairable to be extracted -# (used in expanding command line placeholders "all", "allOnCells", etc.) -def setup_dimension_values_and_sort_vars(time_series_file, mesh_file, variable_list, extra_dims, - basic_dims=['nCells', 'nEdges', 'nVertices', 'Time'], - include_dims=['nCells', 'nEdges', 'nVertices']):#{{{ +def setup_dimension_values_and_sort_vars( + time_series_file, mesh_file, variable_list, extra_dims, + basic_dims=['nCells', 'nEdges', 'nVertices', 'Time'], + include_dims=['nCells', 'nEdges', 'nVertices']): # {{{ + ''' + Creates a list of variables names to be extracted. Prompts for indices + of any extra dimensions that were not specified on the command line. 
+ extra_dims should be a dictionary of indices along extra dimensions (as + opposed to "basic" dimensions). basic_dims is a list of dimension names + that should be excluded from extra_dims. include_dims is a list of + possible dimensions, one of which must be in each vairable to be extracted + (used in expanding command line placeholders "all", "allOnCells", etc.) + ''' def add_var(variables, variable_name, include_dims, exclude_dims=None): if variable_name in variable_names: @@ -353,17 +401,20 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): variable_names = [] exclude_dims = ['Time'] for variable_name in time_series_file.variables: - add_var(time_series_file.variables, str(variable_name), include_dims, exclude_dims=None) + add_var(time_series_file.variables, str(variable_name), + include_dims, exclude_dims=None) if mesh_file is not None: for variable_name in mesh_file.variables: - add_var(mesh_file.variables, str(variable_name), include_dims, exclude_dims) + add_var(mesh_file.variables, str(variable_name), include_dims, + exclude_dims) else: variable_names = variable_list.split(',') - for suffix in ['Cells','Edges','Vertices']: - include_dim = 'n%s'%suffix - if ('allOn%s'%suffix in variable_names) and (include_dim in include_dims): - variable_names.remove('allOn%s'%suffix) + for suffix in ['Cells', 'Edges', 'Vertices']: + include_dim = 'n%s' % suffix + if ('allOn%s' % suffix in variable_names) and (include_dim in + include_dims): + variable_names.remove('allOn%s' % suffix) exclude_dims = ['Time'] for variable_name in time_series_file.variables: add_var(time_series_file.variables, str(variable_name), @@ -371,7 +422,8 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): if mesh_file is not None: for variable_name in mesh_file.variables: add_var(mesh_file.variables, str(variable_name), - include_dims=[include_dim], exclude_dims=exclude_dims) + include_dims=[include_dim], + exclude_dims=exclude_dims) variable_names.sort() @@ -384,27 +436,31 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): nc_file = time_series_file field_dims = nc_file.variables[variable_name].dimensions for dim in field_dims: - if ((dim in basic_dims) or (dim in extra_dims) or (dim in promptDimNames)): + if ((dim in basic_dims) or (dim in extra_dims) + or (dim in promptDimNames)): # this dimension has already been accounted for continue promptDimNames.append(str(dim)) if display_prompt: - print "" - print "Need to define additional dimension values" + print("") + print("Need to define additional dimension values") display_prompt = False dim_size = len(nc_file.dimensions[dim]) valid = False while not valid: - print "Valid range for dimension %s between 0 and %d"%(dim, dim_size-1) - index_string = raw_input("Enter a value for dimension %s: "%(dim)) - indices = parse_extra_dim(str(dim), index_string, time_series_file, mesh_file) + print("Valid range for dimension %s between 0 and %d" + "" % (dim, dim_size-1)) + index_string = input("Enter a value for dimension %s: " + "" % (dim)) + indices = parse_extra_dim(str(dim), index_string, + time_series_file, mesh_file) valid = indices is not None if valid: extra_dims[str(dim)] = indices else: - print " -- Invalid value, please re-enter --" + print(" -- Invalid value, please re-enter --") empty_dims = [] for dim in extra_dims: @@ -413,7 +469,8 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): for variable_name in variable_names: - field_dims = get_var(variable_name, mesh_file, 
time_series_file).dimensions + field_dims = get_var(variable_name, mesh_file, + time_series_file).dimensions skip = False for dim in field_dims: if dim in empty_dims: @@ -425,28 +482,29 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): # Setting dimension values: indices = [] for dim in field_dims: - if dim not in ['Time', 'nCells', 'nEdges', 'nVertices']: + if dim not in basic_dims: indices.append(extra_dims[dim]) if len(indices) == 0: dim_vals = None elif len(indices) == 1: dim_vals = [] for index0 in indices[0]: - dim_vals.append([index0]) + dim_vals.append([index0]) elif len(indices) == 2: dim_vals = [] for index0 in indices[0]: for index1 in indices[1]: - dim_vals.append([index0,index1]) + dim_vals.append([index0, index1]) elif len(indices) == 3: dim_vals = [] for index0 in indices[0]: for index1 in indices[1]: for index2 in indices[2]: - dim_vals.append([index0,index1,index2]) + dim_vals.append([index0, index1, index2]) else: - print "variable %s has too many extra dimensions and will be skipped."%variable_name - continue + print("variable %s has too many extra dimensions and will be " + "skipped." % variable_name) + continue if "nCells" in field_dims: cellVars.append(variable_name) @@ -459,39 +517,44 @@ def add_var(variables, variable_name, include_dims, exclude_dims=None): del dim_vals return (all_dim_vals, cellVars, vertexVars, edgeVars) -#}}} - -# Print a summary of the time levels, mesh file, transects file (optional) -# and variables to be extracted. -def summarize_extraction(mesh_file, time_indices, cellVars, vertexVars, edgeVars, - transects_file=None):#{{{ - print "" - print "Extracting a total of %d time levels."%(len(time_indices)) - print "Using file '%s' as the mesh file for this extraction."%(mesh_file) +# }}} + + +def summarize_extraction(mesh_file, time_indices, cellVars, vertexVars, + edgeVars, transects_file=None): # {{{ + ''' + print a summary of the time levels, mesh file, transects file (optional) + and variables to be extracted. + ''' + + print("") + print("Extracting a total of %d time levels." % (len(time_indices))) + print("Using file '%s' as the mesh file for this extraction." + "" % (mesh_file)) if transects_file is not None: - print "Using file '%s' as the transects file."%(transects_file) - print "" - print "" - print "The following variables will be extracted from the input file(s)." - print "" + print("Using file '%s' as the transects file." 
% (transects_file)) + print("") + print("") + print("The following variables will be extracted from the input file(s).") + print("") if len(cellVars) > 0: - print " Variables with 'nCells' as a dimension:" + print(" Variables with 'nCells' as a dimension:") for variable_name in cellVars: - print " name: %s"%(variable_name) + print(" name: %s" % (variable_name)) if len(vertexVars) > 0: - print " Variables with 'nVertices' as a dimension:" + print(" Variables with 'nVertices' as a dimension:") for variable_name in vertexVars: - print " name: %s"%(variable_name) + print(" name: %s" % (variable_name)) if len(edgeVars) > 0: - print " Variables with 'nEdges' as adimension:" + print(" Variables with 'nEdges' as adimension:") for variable_name in edgeVars: - print " name: %s"%(variable_name) + print(" name: %s" % (variable_name)) - print "" -#}}} + print("") +# }}} def write_pvd_header(path, prefix): # {{{ @@ -503,18 +566,18 @@ def write_pvd_header(path, prefix): # {{{ return pvd_file # }}} -def get_hyperslab_name_and_dims(var_name, extra_dim_vals):#{{{ +def get_hyperslab_name_and_dims(var_name, extra_dim_vals): # {{{ if(extra_dim_vals is None): - return ([var_name],None) + return ([var_name], None) if(len(extra_dim_vals) == 0): - return ([],None) + return ([], None) out_var_names = [] for hyper_slab in extra_dim_vals: pieces = [var_name] pieces.extend(hyper_slab) out_var_names.append('_'.join(pieces)) return (out_var_names, extra_dim_vals) -#}}} +# }}} def write_vtp_header(path, prefix, active_var_index, var_indices, @@ -524,42 +587,46 @@ def write_vtp_header(path, prefix, active_var_index, var_indices, vtkFile = VtkFile("{}/{}".format(path, prefix), VtkPolyData) if xtime is not None: - vtkFile.openElement("metadata") - vtkFile.openElement("xtime") - vtkFile.xml.addText(xtime) - vtkFile.closeElement("xtime") - vtkFile.closeElement("metadata") + vtkFile.openElement(str("metadata")) + vtkFile.openElement(str("xtime")) + vtkFile.xml.addText(str(xtime)) + vtkFile.closeElement(str("xtime")) + vtkFile.closeElement(str("metadata")) vtkFile.openElement(vtkFile.ftype.name) vtkFile.openPiece(npoints=nPoints, npolys=nPolygons) - vtkFile.openElement("Points") - vtkFile.addData("points", vertices) - vtkFile.closeElement("Points") + vtkFile.openElement(str("Points")) + vtkFile.addData(str("points"), vertices) + vtkFile.closeElement(str("Points")) - vtkFile.openElement("Polys") - vtkFile.addData("connectivity", connectivity) - vtkFile.addData("offsets", offsets) - vtkFile.closeElement("Polys") + vtkFile.openElement(str("Polys")) + vtkFile.addData(str("connectivity"), connectivity) + vtkFile.addData(str("offsets"), offsets) + vtkFile.closeElement(str("Polys")) if(cellData): - vtkFile.openData("Cell", scalars=variable_list[active_var_index]) + vtkFile.openData(str("Cell"), + scalars=[str(var) for var in + variable_list[active_var_index]]) for iVar in var_indices: var_name = variable_list[iVar] (out_var_names, dim_list) = \ get_hyperslab_name_and_dims(var_name, all_dim_vals[var_name]) for out_var_name in out_var_names: - vtkFile.addHeader(out_var_name, outType, nPolygons, 1) - vtkFile.closeData("Cell") + vtkFile.addHeader(str(out_var_name), outType, nPolygons, 1) + vtkFile.closeData(str("Cell")) if(pointData): - vtkFile.openData("Point", scalars=variable_list[active_var_index]) + vtkFile.openData(str("Point"), + scalars=[str(var) for var in + variable_list[active_var_index]]) for iVar in var_indices: var_name = variable_list[iVar] (out_var_names, dim_list) = \ get_hyperslab_name_and_dims(var_name, 
all_dim_vals[var_name]) for out_var_name in out_var_names: - vtkFile.addHeader(out_var_name, outType, nPoints, 1) - vtkFile.closeData("Point") + vtkFile.addHeader(str(out_var_name), outType, nPoints, 1) + vtkFile.closeData(str("Point")) vtkFile.closePiece() vtkFile.closeElement(vtkFile.ftype.name) @@ -571,20 +638,188 @@ def write_vtp_header(path, prefix, active_var_index, var_indices, return vtkFile # }}} +def build_topo_point_and_polygon_lists(nc_file, output_32bit, lonlat): # {{{ + + if output_32bit: + dtype = 'f4' + else: + dtype = 'f8' + + xVertex, yVertex, zVertex = \ + _build_location_list_xyz(nc_file, 'Vertex', output_32bit, lonlat) + + nCells = len(nc_file.dimensions['nCells']) + nEdges = len(nc_file.dimensions['nEdges']) + maxEdges = len(nc_file.dimensions['maxEdges']) + + nEdgesOnCell = nc_file.variables['nEdgesOnCell'][:] + verticesOnCell = nc_file.variables['verticesOnCell'][:, :]-1 + edgesOnCell = nc_file.variables['edgesOnCell'][:, :]-1 + verticesOnEdge = nc_file.variables['verticesOnEdge'][:] - 1 + cellsOnEdge = nc_file.variables['cellsOnEdge'][:] - 1 + + # 4 points for each edge face + nPoints = 4*nEdges + # 1 polygon for each edge and cell + nPolygons = nEdges + nCells + + X = numpy.zeros(nPoints, dtype) + Y = numpy.zeros(nPoints, dtype) + Z = numpy.zeros(nPoints, dtype) + + outIndex = 0 + + # The points on an edge are vertex 0, 1, 1, 0 on that edge, making a + # vertical rectangle if the points are offset + iEdges, voe = numpy.meshgrid(numpy.arange(nEdges), [0, 1, 1, 0], + indexing='ij') + iVerts = verticesOnEdge[iEdges, voe].ravel() + X[:] = xVertex[iVerts] + Y[:] = yVertex[iVerts] + Z[:] = zVertex[iVerts] + vertices = (X, Y, Z) + + verticesOnPolygon = -1*numpy.ones((nPolygons, maxEdges), int) + verticesOnPolygon[0:nEdges, 0:4] = \ + numpy.arange(4*nEdges).reshape(nEdges, 4) + + # Build cells + if use_progress_bar: + widgets = ['Build cell connectivity: ', Percentage(), ' ', Bar(), ' ', + ETA()] + bar = ProgressBar(widgets=widgets, maxval=nCells).start() + else: + print("Build cell connectivity...") + + outIndex = nEdges + + for iCell in range(nCells): + neoc = nEdgesOnCell[iCell] + eocs = edgesOnCell[iCell, 0:neoc] + vocs = verticesOnCell[iCell, 0:neoc] + for index in range(neoc): + iVert = vocs[index] + iEdge = eocs[index] + # which vertex on the edge corresponds to iVert? 
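+            # Each edge face above was built from
+            # verticesOnEdge[iEdge, [0, 1, 1, 0]], so points 4*iEdge + 0..1
+            # lie on the cellsOnEdge[iEdge, 0] side and points 4*iEdge + 2..3
+            # on the cellsOnEdge[iEdge, 1] side; the lookup below picks the
+            # copy of iVert that belongs to iCell's side of the edge.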
+ coes = cellsOnEdge[iEdge, :] + voes = verticesOnEdge[iEdge, :] + + if coes[0] == iCell: + if voes[0] == iVert: + voe = 0 + else: + voe = 1 + else: + if voes[0] == iVert: + voe = 3 + else: + voe = 2 + + verticesOnPolygon[nEdges+iCell, index] = 4*iEdge + voe + + outIndex += neoc + + if use_progress_bar: + bar.update(iCell) + + if use_progress_bar: + bar.finish() + + validVerts = verticesOnPolygon >= 0 + + if lonlat: + lonEdge = numpy.rad2deg(nc_file.variables['lonEdge'][:]) + latEdge = numpy.rad2deg(nc_file.variables['latEdge'][:]) + lonCell = numpy.rad2deg(nc_file.variables['lonCell'][:]) + latCell = numpy.rad2deg(nc_file.variables['latCell'][:]) + lonPolygon = numpy.append(lonEdge, lonCell) + latPolygon = numpy.append(latEdge, latCell) + + vertices, verticesOnPolygon = _fix_lon_lat_vertices(vertices, + verticesOnPolygon, + validVerts, + lonPolygon) + + if nc_file.on_a_sphere.strip() == 'NO' and \ + nc_file.is_periodic.strip() == 'YES': + if lonlat: + xcoord = lonPolygon + ycoord = latPolygon + else: + xEdge = numpy.rad2deg(nc_file.variables['xEdge'][:]) + yEdge = numpy.rad2deg(nc_file.variables['yEdge'][:]) + xCell = numpy.rad2deg(nc_file.variables['xCell'][:]) + yCell = numpy.rad2deg(nc_file.variables['yCell'][:]) + xcoord = numpy.append(xEdge, xCell) + ycoord = numpy.append(yEdge, yCell) + + vertices, verticesOnPolygon = _fix_periodic_vertices(vertices, + verticesOnPolygon, + validVerts, + xcoord, ycoord, + nc_file.x_period, + nc_file.y_period) + + nPoints = len(vertices[0]) + + # we want to know the cells corresponding to each point. The first two + # points correspond to the first cell, the second two to the second cell + # (if any). + cell_to_point_map = -1*numpy.ones((nPoints), int) + boundary_mask = numpy.zeros((nPoints), bool) + + # first cell on edge always exists + coe = cellsOnEdge[:, 0].copy() + for index in range(2): + voe = verticesOnPolygon[0:nEdges, index] + cell_to_point_map[voe] = coe + boundary_mask[voe] = False + + # second cell on edge may not exist + coe = cellsOnEdge[:, 1].copy() + mask = coe == -1 + # use the first cell if the second doesn't exist + coe[mask] = cellsOnEdge[:, 0][mask] + for index in range(2, 4): + voe = verticesOnPolygon[0:nEdges, index] + cell_to_point_map[voe] = coe + boundary_mask[voe] = mask + + # for good measure, make sure vertices on cell are also accounted for + for index in range(maxEdges): + iCells = numpy.arange(nCells) + voc = verticesOnPolygon[nEdges:nEdges+nCells, index] + mask = index < nEdgesOnCell + cell_to_point_map[voc[mask]] = iCells[mask] + boundary_mask[voc[mask]] = False + + connectivity = verticesOnPolygon[validVerts] + validCount = numpy.sum(numpy.array(validVerts, int), axis=1) + offsets = numpy.cumsum(validCount, dtype=int) + valid_mask = numpy.ones(nCells, bool) + + return vertices, connectivity, offsets, valid_mask, \ + cell_to_point_map, boundary_mask.ravel() # }}} + + def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ - print "Build geometry for fields on cells..." 
+ print("Build geometry for fields on cells...") vertices = _build_location_list_xyz(nc_file, 'Vertex', output_32bit, lonlat) if lonlat: lonCell = numpy.rad2deg(nc_file.variables['lonCell'][:]) + latCell = numpy.rad2deg(nc_file.variables['latCell'][:]) nCells = len(nc_file.dimensions['nCells']) nEdgesOnCell = nc_file.variables['nEdgesOnCell'][:] verticesOnCell = nc_file.variables['verticesOnCell'][:, :] - 1 + # MPAS-O sets non-masked values to total number of vertices instead of 0 + # (as produced in mesh workflow) + verticesOnCell[numpy.where(verticesOnCell == len(vertices[0]))] = 0 validVertices = numpy.zeros(verticesOnCell.shape, bool) for vIndex in range(validVertices.shape[1]): @@ -596,6 +831,21 @@ def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVertices, lonCell) + if nc_file.on_a_sphere.strip() == 'NO' and \ + nc_file.is_periodic.strip() == 'YES': + if lonlat: + xcoord = lonCell + ycoord = latCell + else: + xcoord = nc_file.variables['xCell'][:] + ycoord = nc_file.variables['yCell'][:] + vertices, verticesOnCell = _fix_periodic_vertices(vertices, + verticesOnCell, + validVertices, + xcoord, ycoord, + nc_file.x_period, + nc_file.y_period) + connectivity = verticesOnCell[validVertices] offsets = numpy.cumsum(nEdgesOnCell, dtype=int) valid_mask = numpy.ones(nCells, bool) @@ -604,12 +854,13 @@ def build_cell_geom_lists(nc_file, output_32bit, lonlat): # {{{ def build_vertex_geom_lists(nc_file, output_32bit, lonlat): # {{{ - print "Build geometry for fields on vertices...." + print("Build geometry for fields on vertices....") vertices = _build_location_list_xyz(nc_file, 'Cell', output_32bit, lonlat) if lonlat: lonVertex = numpy.rad2deg(nc_file.variables['lonVertex'][:]) + latVertex = numpy.rad2deg(nc_file.variables['latVertex'][:]) vertexDegree = len(nc_file.dimensions['vertexDegree']) @@ -627,6 +878,23 @@ def build_vertex_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVertices, lonVertex[valid_mask]) + if nc_file.on_a_sphere.strip() == 'NO' and \ + nc_file.is_periodic.strip() == 'YES': + # all remaining entries in cellsOnVertex are valid + validVertices = numpy.ones(cellsOnVertex.shape, bool) + if lonlat: + xcoord = lonVertex[valid_mask] + ycoord = latVertex[valid_mask] + else: + xcoord = nc_file.variables['xVertex'][valid_mask] + ycoord = nc_file.variables['yVertex'][valid_mask] + vertices, cellsOnVertex = _fix_periodic_vertices(vertices, + cellsOnVertex, + validVertices, + xcoord, ycoord, + nc_file.x_period, + nc_file.y_period) + connectivity = cellsOnVertex.ravel() validCount = cellsOnVertex.shape[0] offsets = vertexDegree*numpy.arange(1, validCount+1) @@ -646,6 +914,7 @@ def build_edge_geom_lists(nc_file, output_32bit, lonlat): # {{{ if lonlat: lonEdge = numpy.rad2deg(nc_file.variables['lonEdge'][:]) + latEdge = numpy.rad2deg(nc_file.variables['latEdge'][:]) nEdges = len(nc_file.dimensions['nEdges']) nCells = len(nc_file.dimensions['nCells']) @@ -673,10 +942,25 @@ def build_edge_geom_lists(nc_file, output_32bit, lonlat): # {{{ validVerts = validVerts[valid_mask, :] if lonlat: - vertices, cellsOnVertex = _fix_lon_lat_vertices(vertices, - vertsOnCell, - validVerts, - lonEdge[valid_mask]) + vertices, vertsOnCell = _fix_lon_lat_vertices(vertices, + vertsOnCell, + validVerts, + lonEdge[valid_mask]) + if nc_file.on_a_sphere.strip() == 'NO' and \ + nc_file.is_periodic.strip() == 'YES': + if lonlat: + xcoord = lonEdge[valid_mask] + ycoord = latEdge[valid_mask] + else: + xcoord = nc_file.variables['xEdge'][valid_mask] + ycoord = 
nc_file.variables['yEdge'][valid_mask] + + vertices, vertsOnCell = _fix_periodic_vertices(vertices, + vertsOnCell, + validVerts, + xcoord, ycoord, + nc_file.x_period, + nc_file.y_period) connectivity = vertsOnCell[validVerts] validCount = numpy.sum(numpy.array(validVerts, int), axis=1) @@ -694,14 +978,18 @@ def get_field_sign(field_name): return (field_name, sign) -def read_field(var_name, mesh_file, time_series_file, extra_dim_vals, time_index, - block_indices, outType, sign=1):#{{{ - def read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays):#{{{ +def read_field(var_name, mesh_file, time_series_file, extra_dim_vals, + time_index, block_indices, outType, sign=1, + topo_dim=None, topo_cell_indices=None, nTopoLevels=None): # {{{ + + def read_field_with_dims(field_var, dim_vals, temp_shape, outType, + index_arrays): # {{{ temp_field = numpy.zeros(temp_shape, dtype=outType) inDims = len(dim_vals) if inDims <= 0 or inDims > 5: - print 'reading field %s with %s dimensions not supported.'%(var_name, inDims) + print('reading field %s with %s dimensions not supported.' + '' % (var_name, inDims)) sys.exit(1) if inDims == 1: @@ -723,30 +1011,38 @@ def read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays) dim_vals[4]], dtype=outType) + if topo_dim is not None and topo_dim in field_var.dimensions: + if len(temp_field.shape) != 2: + raise ValueError('Field with dimensions {} not supported in ' + 'topogrpahy extraction mode.'.format( + field_var.dimensions)) + # sample the depth-dependent field at the index of the topography + temp_field = temp_field[numpy.arange(temp_field.shape[0]), + topo_cell_indices] + outDims = len(temp_field.shape) if outDims <= 0 or outDims > 4: - print 'something went wrong reading field %s, resulting in a temp array with %s dimensions.'%(var_name, outDims) + print('something went wrong reading field %s, resulting in a temp ' + 'array with %s dimensions.' 
% (var_name, outDims)) sys.exit(1) block_indices = numpy.arange(temp_field.shape[0]) if outDims == 1: field = temp_field elif outDims == 2: - field = temp_field[block_indices,index_arrays[0]] + field = temp_field[block_indices, index_arrays[0]] elif outDims == 3: - field = temp_field[block_indices,index_arrays[0],index_arrays[1]] + field = temp_field[block_indices, index_arrays[0], index_arrays[1]] elif outDims == 4: - field = temp_field[block_indices,index_arrays[0],index_arrays[1],index_arrays[2]] - - return field - + field = temp_field[block_indices, index_arrays[0], index_arrays[1], + index_arrays[2]] -#}}} + return field # }}} field_var = get_var(var_name, mesh_file, time_series_file) try: missing_val = field_var.missing_value - except: + except AttributeError: missing_val = -9999999790214767953607394487959552.000000 dim_vals = [] @@ -757,12 +1053,14 @@ def read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays) index_arrays = [] for i in range(field_var.ndim): - dim = field_var.dimensions[i] + dim = field_var.dimensions[i] if dim == 'Time': dim_vals.append(time_index) elif dim in ['nCells', 'nEdges', 'nVertices']: dim_vals.append(block_indices) temp_shape = temp_shape + (len(block_indices),) + elif topo_dim is not None and dim == topo_dim: + dim_vals.append(numpy.arange(nTopoLevels)) else: extra_dim_val = extra_dim_vals[extra_dim_index] try: @@ -774,118 +1072,127 @@ def read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays) dim_vals.append(numpy.arange(shape[i])) temp_shape = temp_shape + (shape[i],) - index_array_var = get_var(extra_dim_val, mesh_file, time_series_file) + index_array_var = get_var(extra_dim_val, mesh_file, + time_series_file) # read the appropriate indices from the index_array_var - index_array = numpy.maximum(0,numpy.minimum(shape[i]-1, index_array_var[block_indices]-1)) + index_array = numpy.maximum(0, numpy.minimum( + shape[i]-1, index_array_var[block_indices]-1)) index_arrays.append(index_array) extra_dim_index += 1 - field = read_field_with_dims(field_var, dim_vals, temp_shape, outType, index_arrays) field[field == missing_val] = numpy.nan - return sign*field -#}}} + return sign*field # }}} def compute_zInterface(minLevelCell, maxLevelCell, layerThicknessCell, - zMinCell, zMaxCell, dtype, cellsOnEdge=None):#{{{ + zMinCell, zMaxCell, dtype, cellsOnEdge=None): + # {{{ - (nCells,nLevels) = layerThicknessCell.shape + (nCells, nLevels) = layerThicknessCell.shape - cellMask = numpy.ones((nCells,nLevels), bool) + cellMask = numpy.ones((nCells, nLevels), bool) for iLevel in range(nLevels): if minLevelCell is not None: - cellMask[:,iLevel] = numpy.logical_and(cellMask[:,iLevel], iLevel >= minLevelCell) + cellMask[:, iLevel] = numpy.logical_and(cellMask[:, iLevel], + iLevel >= minLevelCell) if maxLevelCell is not None: - cellMask[:,iLevel] = numpy.logical_and(cellMask[:,iLevel], iLevel <= maxLevelCell) + cellMask[:, iLevel] = numpy.logical_and(cellMask[:, iLevel], + iLevel <= maxLevelCell) - zInterfaceCell = numpy.zeros((nCells,nLevels+1),dtype=dtype) + zInterfaceCell = numpy.zeros((nCells, nLevels+1), dtype=dtype) for iLevel in range(nLevels): - zInterfaceCell[:,iLevel+1] = (zInterfaceCell[:,iLevel] - + cellMask[:,iLevel]*layerThicknessCell[:,iLevel]) + zInterfaceCell[:, iLevel+1] = \ + zInterfaceCell[:, iLevel] \ + + cellMask[:, iLevel]*layerThicknessCell[:, iLevel] if zMinCell is not None: minLevel = minLevelCell.copy() minLevel[minLevel < 0] = nLevels-1 - zOffsetCell = zMinCell - 
zInterfaceCell[numpy.arange(0,nCells),minLevel] + zOffsetCell = zMinCell - zInterfaceCell[numpy.arange(0, nCells), + minLevel] else: - zOffsetCell = zMaxCell - zInterfaceCell[numpy.arange(0,nCells),maxLevelCell+1] + zOffsetCell = zMaxCell - zInterfaceCell[numpy.arange(0, nCells), + maxLevelCell+1] for iLevel in range(nLevels+1): - zInterfaceCell[:,iLevel] += zOffsetCell + zInterfaceCell[:, iLevel] += zOffsetCell if cellsOnEdge is None: - return zInterfaceCell + return zInterfaceCell else: nEdges = cellsOnEdge.shape[0] - zInterfaceEdge = numpy.zeros((nEdges,nLevels+1),dtype=dtype) + zInterfaceEdge = numpy.zeros((nEdges, nLevels+1), dtype=dtype) # Get a list of valid cells on edges and a mask of which are valid - cellsOnEdgeMask = numpy.logical_and(cellsOnEdge >= 0, cellsOnEdge < nCells) + cellsOnEdgeMask = numpy.logical_and(cellsOnEdge >= 0, + cellsOnEdge < nCells) cellIndicesOnEdge = [] - cellIndicesOnEdge.append(cellsOnEdge[cellsOnEdgeMask[:,0],0]) - cellIndicesOnEdge.append(cellsOnEdge[cellsOnEdgeMask[:,1],1]) + cellIndicesOnEdge.append(cellsOnEdge[cellsOnEdgeMask[:, 0], 0]) + cellIndicesOnEdge.append(cellsOnEdge[cellsOnEdgeMask[:, 1], 1]) for iLevel in range(nLevels): edgeMask = numpy.zeros(nEdges, bool) layerThicknessEdge = numpy.zeros(nEdges, float) denom = numpy.zeros(nEdges, float) for index in range(2): - mask = cellsOnEdgeMask[:,index] + mask = cellsOnEdgeMask[:, index] cellIndices = cellIndicesOnEdge[index] - cellMaskLocal = cellMask[cellIndices,iLevel] + cellMaskLocal = cellMask[cellIndices, iLevel] - edgeMask[mask] = numpy.logical_or(edgeMask[mask], cellMaskLocal) + edgeMask[mask] = numpy.logical_or(edgeMask[mask], + cellMaskLocal) - layerThicknessEdge[mask] += cellMaskLocal*layerThicknessCell[cellIndices,iLevel] + layerThicknessEdge[mask] += \ + cellMaskLocal*layerThicknessCell[cellIndices, iLevel] denom[mask] += 1.0*cellMaskLocal layerThicknessEdge[edgeMask] /= denom[edgeMask] - zInterfaceEdge[:,iLevel+1] = (zInterfaceEdge[:,iLevel] - + edgeMask*layerThicknessEdge) + zInterfaceEdge[:, iLevel+1] = (zInterfaceEdge[:, iLevel] + + edgeMask*layerThicknessEdge) if zMinCell is not None: refLevelEdge = numpy.zeros(nEdges, int) for index in range(2): - mask = cellsOnEdgeMask[:,index] + mask = cellsOnEdgeMask[:, index] cellIndices = cellIndicesOnEdge[index] - refLevelEdge[mask] = numpy.maximum(refLevelEdge[mask], minLevel[cellIndices]) + refLevelEdge[mask] = numpy.maximum(refLevelEdge[mask], + minLevel[cellIndices]) else: refLevelEdge = (nLevels-1)*numpy.ones(nEdges, int) for index in range(2): - mask = cellsOnEdgeMask[:,index] + mask = cellsOnEdgeMask[:, index] cellIndices = cellIndicesOnEdge[index] - refLevelEdge[mask] = numpy.minimum(refLevelEdge[mask], maxLevelCell[cellIndices]+1) - + refLevelEdge[mask] = numpy.minimum(refLevelEdge[mask], + maxLevelCell[cellIndices]+1) zOffsetEdge = numpy.zeros(nEdges, float) # add the average of zInterfaceCell at each adjacent cell denom = numpy.zeros(nEdges, float) for index in range(2): - mask = cellsOnEdgeMask[:,index] + mask = cellsOnEdgeMask[:, index] cellIndices = cellIndicesOnEdge[index] - zOffsetEdge[mask] += zInterfaceCell[cellIndices,refLevelEdge[mask]] + zOffsetEdge[mask] += zInterfaceCell[cellIndices, + refLevelEdge[mask]] denom[mask] += 1.0 mask = denom > 0. 
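+        # complete the average by dividing by the number of valid neighbor
+        # cells, skipping edges where neither neighbor is valid (denom == 0)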
zOffsetEdge[mask] /= denom[mask] # subtract the depth of zInterfaceEdge at the level of the bottom - zOffsetEdge -= zInterfaceEdge[numpy.arange(nEdges),refLevelEdge] + zOffsetEdge -= zInterfaceEdge[numpy.arange(nEdges), refLevelEdge] for iLevel in range(nLevels+1): - zInterfaceEdge[:,iLevel] += zOffsetEdge - - return (zInterfaceCell, zInterfaceEdge) + zInterfaceEdge[:, iLevel] += zOffsetEdge -#}}} + return (zInterfaceCell, zInterfaceEdge) # }}} def _build_location_list_xyz(nc_file, suffix, output_32bit, lonlat): # {{{ @@ -913,7 +1220,6 @@ def _fix_lon_lat_vertices(vertices, verticesOnCell, validVertices, nCells = verticesOnCell.shape[0] nVertices = len(vertices[0]) - xVertex = vertices[0] xVertex = vertices[0] xDiff = xVertex[verticesOnCell] - lonCell.reshape(nCells, 1) @@ -950,4 +1256,66 @@ def _fix_lon_lat_vertices(vertices, verticesOnCell, validVertices, return vertices, verticesOnCell # }}} + +def _fix_periodic_vertices(vertices, verticesOnCell, validVertices, + xCell, yCell, xperiod, yperiod): # {{{ + + vertices, verticesOnCell = _fix_periodic_vertices_1D( + vertices, verticesOnCell, validVertices, xCell, xperiod, dim=0) + vertices, verticesOnCell = _fix_periodic_vertices_1D( + vertices, verticesOnCell, validVertices, yCell, yperiod, dim=1) + + return vertices, verticesOnCell # }}} + + +def _fix_periodic_vertices_1D(vertices, verticesOnCell, validVertices, + coordCell, coordPeriod, dim): # {{{ + + nCells = verticesOnCell.shape[0] + nVertices = len(vertices[0]) + + coordVertex = vertices[dim] + + coordDiff = coordVertex[verticesOnCell] - coordCell.reshape(nCells, 1) + + # which cells have vertices that are out of range? + coordOutOfRange = numpy.logical_and( + validVertices, + numpy.logical_or(coordDiff > coordPeriod / 2.0, + coordDiff < -coordPeriod / 2.0)) + + coordCellsOutOfRange = numpy.any(coordOutOfRange, axis=1) + + coordValid = validVertices[coordCellsOutOfRange, :] + + coordVerticesToChange = numpy.zeros(verticesOnCell.shape, bool) + coordVerticesToChange[coordCellsOutOfRange, :] = coordValid + + coordDiff = coordDiff[coordCellsOutOfRange, :][coordValid] + coordVOC = verticesOnCell[coordCellsOutOfRange, :][coordValid] + + coordNVerticesToAdd = numpy.count_nonzero(coordValid) + + coordVerticesToAdd = numpy.arange(coordNVerticesToAdd) + nVertices + coordV = coordVertex[coordVOC] + verticesOnCell[coordVerticesToChange] = coordVerticesToAdd + + # need to shift points outside periodic domain (assumes that mesh is only + # within one period) can use mod if this is not the case in general + coordMask = coordDiff > coordPeriod / 2.0 + coordV[coordMask] -= coordPeriod + coordMask = coordDiff < -coordPeriod / 2.0 + coordV[coordMask] += coordPeriod + + outVertices = [] + for outDim in range(3): + if outDim == dim: + outVertices.append(numpy.append(vertices[outDim], coordV)) + else: + outVertices.append(numpy.append(vertices[outDim], + vertices[outDim][coordVOC])) + + return tuple(outVertices), verticesOnCell # }}} + + # vim: set expandtab: diff --git a/conda_package/recipe/build.sh b/conda_package/recipe/build.sh new file mode 100644 index 000000000..f88a71de3 --- /dev/null +++ b/conda_package/recipe/build.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -x +set -e + +cp -r ocean landice visualization mesh_tools conda_package + +cd conda_package +${PYTHON} -m pip install . --no-deps -vv + +cd mesh_tools/mesh_conversion_tools + +# build and install JIGSAW +mkdir build +cd build +cmake -DCMAKE_INSTALL_PREFIX=${PREFIX} -DCMAKE_BUILD_TYPE=Release .. 
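+# the flags above select an optimized (Release) build installed into the
+# conda environment prefix (${PREFIX})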
+make +make install + diff --git a/conda_package/recipe/conda_build_config.yaml b/conda_package/recipe/conda_build_config.yaml new file mode 100644 index 000000000..a024c5f34 --- /dev/null +++ b/conda_package/recipe/conda_build_config.yaml @@ -0,0 +1,10 @@ +channel_sources: + - conda-forge,defaults + +channel_targets: + - conda-forge main + +python: + - 3.7 + - 3.6 + - 2.7 diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml new file mode 100644 index 000000000..0be0e9172 --- /dev/null +++ b/conda_package/recipe/meta.yaml @@ -0,0 +1,95 @@ +{% set name = "mpas_tools" %} +{% set version = "0.0.4" %} + +package: + name: '{{ name|lower }}' + version: '{{ version }}' + +source: + path: ../.. + +build: + number: 0 + entry_points: + - planar_hex = mpas_tools.planar_hex:main + - translate_planar_grid = mpas_tools.translate:main + - merge_grids = mpas_tools.merge_grids:main + - split_grids = mpas_tools.split_grids:main + +requirements: + build: + - {{ compiler('cxx') }} + - cmake + host: + - python + - netcdf4 + - hdf5 + - libnetcdf + - setuptools + - netcdf4 + - openmp # [osx] + + run: + - python + - netcdf4 + - hdf5 + - libnetcdf + - numpy + - scipy + - xarray + - geometric_features + - pyevtk + - future + - backports.tempfile + +test: + requires: + - pytest + source_files: + - mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson + - mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc + - mesh_tools/mesh_conversion_tools/test/land_mask_final.nc + - conda_package/mpas_tools/tests/* + commands: + - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x20_1km.nc' + - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -x 1000. -y 2000. + - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -c + - planar_hex --nx=20 --ny=40 --dc=1000. --outFileName='periodic_mesh_20x40_1km.nc' + - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -d 'periodic_mesh_20x40_1km.nc' + - MpasMeshConverter.x mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc mesh.nc + - MpasCellCuller.x mesh.nc culled_mesh.nc -m mesh_tools/mesh_conversion_tools/test/land_mask_final.nc + - MpasMaskCreator.x mesh.nc arctic_mask.nc -f mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson + - planar_hex --nx=30 --ny=20 --dc=1000. 
--npx --npy --outFileName='nonperiodic_mesh_30x20_1km.nc' + - MpasCellCuller.x nonperiodic_mesh_30x20_1km.nc culled_nonperiodic_mesh_30x20_1km.nc + - python -m pytest conda_package/mpas_tools/tests + - mark_horns_for_culling.py --help + - set_lat_lon_fields_in_planar_grid.py --help + - create_SCRIP_file_from_MPAS_mesh.py --help + - create_landice_grid_from_generic_MPAS_grid.py --help + - define_cullMask.py --help + - interpolate_to_mpasli_grid.py --help + - mark_domain_boundaries_dirichlet.py --help + - add_land_locked_cells_to_mask.py --help + - widen_transect_edge_masks.py --help + - add_critical_land_blockages_to_mask.py --help + - moc_southern_boundary_extractor.py --help + - paraview_vtk_field_extractor.py -f mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc -v latCell,lonCell --ignore_time -o vtk_test + - split_grids --help + - merge_grids --help + +about: + home: https://github.com/MPAS-Dev/MPAS-Tools/ + license: BSD-3-Clause + license_family: BSD + license_file: LICENSE + summary: Mesh tools for Model for Prediction Across Scales (MPAS) + description: | + A set of tools for creating and manipulating meshes for the climate + components based on the Model for Prediction Across Scales (MPAS) framework + doc_url: 'https://github.com/MPAS-Dev/MPAS-Tools/blob/master/README.md' + dev_url: 'https://github.com/MPAS-Dev/MPAS-Tools/' + +extra: + recipe-maintainers: + - xylar + - jhkennedy diff --git a/conda_package/setup.py b/conda_package/setup.py new file mode 100755 index 000000000..82b57b3d9 --- /dev/null +++ b/conda_package/setup.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python + +import os +import re +from setuptools import setup, find_packages +import shutil + +here = os.path.abspath(os.path.dirname(__file__)) +with open(os.path.join(here, 'mpas_tools', '__init__.py')) as f: + init_file = f.read() + +version = re.search(r'{}\s*=\s*[(]([^)]*)[)]'.format('__version_info__'), + init_file).group(1).replace(', ', '.') + +os.chdir(here) + +for path in ['ocean', 'landice', 'visualization', 'mesh_tools']: + if not os.path.exists(path): + shutil.copytree('../{}'.format(path), './{}'.format(path)) + +setup(name='mpas_tools', + version=version, + description='A set of tools for creating and manipulating meshes for the' + ' climate components based on the Model for Prediction ' + 'Across Scales (MPAS) framework', + url='https://github.com/MPAS-Dev/MPAS-Tools', + author='MPAS-Analysis Developers', + author_email='mpas-developers@googlegroups.com', + license='BSD', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'License :: OSI Approved :: BSD License', + 'Operating System :: OS Independent', + 'Intended Audience :: Science/Research', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Topic :: Scientific/Engineering', + ], + packages=find_packages(), + package_data={}, + scripts=['mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py', + 'mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py', + 'mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py', + 'landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py', + 'landice/mesh_tools_li/define_cullMask.py', + 'landice/mesh_tools_li/interpolate_to_mpasli_grid.py', + 'landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py', + 'ocean/coastline_alteration/add_land_locked_cells_to_mask.py', + 
'ocean/coastline_alteration/widen_transect_edge_masks.py', + 'ocean/coastline_alteration/add_critical_land_blockages_to_mask.py', + 'ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py', + 'visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py'], + install_requires=['numpy', 'xarray', 'netCDF4', 'pyevtk'], + entry_points={'console_scripts': + ['planar_hex = mpas_tools.planar_hex:main', + 'translate_planar_grid = mpas_tools.translate:main', + 'merge_grids = mpas_tools.merge_grids:main', + 'split_grids = mpas_tools.split_grids:main']}) diff --git a/grid_gen/ascii_netcdf_packager/.gitignore b/grid_gen/ascii_netcdf_packager/.gitignore deleted file mode 100644 index 98844dc60..000000000 --- a/grid_gen/ascii_netcdf_packager/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.x -*.nc diff --git a/grid_gen/ascii_netcdf_packager/Makefile b/grid_gen/ascii_netcdf_packager/Makefile deleted file mode 100644 index 154cc06d8..000000000 --- a/grid_gen/ascii_netcdf_packager/Makefile +++ /dev/null @@ -1,11 +0,0 @@ -CXX = g++ -CPPFLAGS = -I${NETCDF}/include -CXXFLAGS = -O3 -LIBS = -L${NETCDF}/lib -lnetcdf -lnetcdf_c++ -EXE = AsciiNetCDFPackager.x - -all: - $(CXX) $(CPPFLAGS) ascii_to_netcdf_packager.cpp $(CXXFLAGS) $(LIBS) -o $(EXE) - -clean: - rm -f $(EXE) diff --git a/grid_gen/ascii_netcdf_packager/README b/grid_gen/ascii_netcdf_packager/README deleted file mode 100644 index 57205468a..000000000 --- a/grid_gen/ascii_netcdf_packager/README +++ /dev/null @@ -1,39 +0,0 @@ -Readme for ascii_to_netcdf_packager.cpp - -Author: Doug Jacobsen - -Purpose: - ascii_to_netcdf_packager.cpp is a piece of software designed create a NetCDF - file that contains enough information to build an MPAS mesh from. - -Requirements: - ascii_to_netcdf_packager.cpp requires the c++ netcdf libraries to be able to read/write NetCDF files. - It has been tested using g++ version 4.8.1 - -Usage of ascii_to_netcdf_packager.cpp: - ./AsciiNetCDFPackager.x [mod] - - Input options are: - [mod] (Optional) -- This allows the output mesh name to be modified - from grid.N.nc to grid.mod.N.nc - - This program reads three ascii files from the current directory: - * end_points.dat - This file should contain the x, y, and z coordinates - for every cell center in the mesh. Each row is a - point, and the columns are order x y z. - - * triangles.dat - This file contains the indices for cells that make up - each triangle. Each row is a triangle listing the - indices for each cell that is a vertex of the - triangle. Each column is an index of a triangle - vertex. - - * point_density.dat - This file contains the value of the density - function evaluated at each cell center. - - Upon completion, this program will create a new file named grid.N.nc that - contains enough information to build an MPAS mesh using the - MpasMeshConverter.x program. - - In the file name, N will be replaced with the number of cells in the mesh. 
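The three ASCII inputs described in this README are plain whitespace-separated tables: one cell center (x y z) per row in end_points.dat, one triangle of cell indices per row in triangles.dat, and one density value per cell center in point_density.dat. A minimal, self-contained Python sketch that writes a hypothetical four-point example in that layout and reads it back with numpy (illustrative data only, not a real mesh):

import numpy

# Write tiny hypothetical inputs in the layout the README describes,
# then read them back for a quick sanity check.
with open('end_points.dat', 'w') as f:        # one cell center per row: x y z
    f.write('0.0 0.0 1.0\n1.0 0.0 0.0\n0.0 1.0 0.0\n-1.0 0.0 0.0\n')
with open('triangles.dat', 'w') as f:         # one triangle of cell indices per row
    f.write('1 2 3\n1 3 4\n')                 # 1-based here; the packager detects the base
with open('point_density.dat', 'w') as f:     # one density value per cell center
    f.write('1.0\n1.0\n0.5\n0.5\n')

xyz = numpy.loadtxt('end_points.dat')                   # shape (nCells, 3)
triangles = numpy.loadtxt('triangles.dat', dtype=int)   # shape (nTriangles, 3)
density = numpy.loadtxt('point_density.dat')            # shape (nCells,)
print(xyz.shape, triangles.shape, density.shape)        # (4, 3) (2, 3) (4,)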
- diff --git a/grid_gen/ascii_netcdf_packager/ascii_to_netcdf_packager.cpp b/grid_gen/ascii_netcdf_packager/ascii_to_netcdf_packager.cpp deleted file mode 100644 index 2c6a4e527..000000000 --- a/grid_gen/ascii_netcdf_packager/ascii_to_netcdf_packager.cpp +++ /dev/null @@ -1,663 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#define ID_LEN 10 - -using namespace std; -//using namespace tr1; - -int nCells, nVertices, vertexDegree; -bool spherical=false; -double sphereRadius=1.0; -int connectivityBase; -string in_history = ""; -string in_file_id = ""; - -// Connectivity and location information {{{ - -vector xCell, yCell, zCell; -vector xVertex, yVertex, zVertex; -vector< vector > cellsOnVertex; -vector meshDensity; - -// }}} - -// Iterators {{{ -vector::iterator int_itr; -vector< vector >::iterator vec_int_itr; -vector< vector >::iterator vec_dbl_itr; -vector::iterator dbl_itr; -// }}} - -/* Building/Ordering functions {{{ */ -int readGridInput(const string inputFilename); -int buildVertices(); -/*}}}*/ - -/* Output functions {{{*/ -int outputGridDimensions(const string outputFilename); -int outputGridAttributes(const string outputFilename, const string inputFilename); -int outputGridCoordinates(const string outputFilename); -int outputVertexConnectivity(const string outputFilename); -int outputMeshDensity(const string outputFilename); -/*}}}*/ - -/* Utility functions {{{*/ -int circumcenter(double x1, double y1, double z1, double x2, double y2, double z2, double x3, double y3, double z3, double *cx, double *cy, double *cz); -int isCCW(double x1, double y1, double z1, double x2, double y2, double z2, double x3, double y3, double z3); -/*}}}*/ - -string gen_random(const int len); - -int main ( int argc, char *argv[] ) { - int error; - ostringstream out_name_stream; - string out_name; - string in_name = "grid.nc"; - - cout << endl << endl; - cout << "************************************************************" << endl; - cout << "ASCII_TO_NETCDF_PACKAGER:\n"; - cout << " C++ version\n"; - cout << " Convert a set of ascii files describing a grid into a NetCDF file describing the same grid.\n"; - cout << " Requires cell information, and connectivity of the dual grid. Along with density values of each cell.\n"; - cout << endl << endl; - cout << " Compiled on " << __DATE__ << " at " << __TIME__ << ".\n"; - cout << "************************************************************" << endl; - cout << endl << endl; - - srand(time(NULL)); - - cout << "Reading input grid." << endl; - error = readGridInput(in_name); - if(error) return 1; - - - if ( argc > 1 ) { - out_name_stream << "grid." << argv[1] << "." << nCells << ".nc"; - } else { - out_name_stream << "grid." << nCells << ".nc"; - } - out_name = out_name_stream.str(); - - cout << "Building veritces." 
<< endl; - error = buildVertices(); - if(error) return 1; - - cout << endl << "Writing file: " << out_name << endl << endl; - - cout << "Writing grid dimensions" << endl; - if(error = outputGridDimensions(out_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - cout << "Writing grid attributes" << endl; - if(error = outputGridAttributes(out_name, in_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - cout << "Writing grid coordinates" << endl; - if(error = outputGridCoordinates(out_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - cout << "Writing vertex connectivity" << endl; - if(error = outputVertexConnectivity(out_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - cout << "Reading and writing meshDensity" << endl; - if(error = outputMeshDensity(out_name)){ - cout << "Error - " << error << endl; - exit(error); - } - - return 0; -} - -/* Building/Ordering functions {{{ */ -int readGridInput(const string inputFilename){/*{{{*/ - double x, y, z; - ifstream cells("end_points.dat"); - ifstream dual_cells("triangles.dat"); - ifstream density("point_density.dat"); - string line; - vector *dual_cell; - int iVtx; - - double xRange[2], yRange[2], zRange[2]; - -#ifdef _DEBUG - cout << endl << endl << "Begin function: readGridInput" << endl << endl; -#endif - - xCell.clear(); - yCell.clear(); - zCell.clear(); - - xRange[0] = DBL_MAX; - xRange[1] = DBL_MIN; - yRange[0] = DBL_MAX; - yRange[1] = DBL_MIN; - zRange[0] = DBL_MAX; - zRange[1] = DBL_MIN; - - while(!cells.eof()){ - cells >> x >> y >> z; - - if(cells.good()){ - xRange[0] = min(xRange[0], x); - xRange[1] = max(xRange[1], x); - yRange[0] = min(yRange[0], y); - yRange[1] = max(yRange[1], y); - zRange[0] = min(zRange[0], z); - zRange[1] = max(zRange[1], z); - xCell.push_back(x); - yCell.push_back(y); - zCell.push_back(z); - } - } - cells.close(); - - if( fabs(xRange[1] - xRange[0]) > FLT_EPSILON && fabs(yRange[1] - yRange[0]) > FLT_EPSILON && fabs(zRange[1] - zRange[0]) > FLT_EPSILON ){ - spherical = true; - } - - if (spherical) { - sphereRadius = sqrt(xCell[0]*xCell[0] + yCell[0]*yCell[0] + zCell[0]*zCell[0]); - } - - cellsOnVertex.clear(); - connectivityBase = INT_MAX; - - nVertices = 0; - - for(std::string line; getline(dual_cells, line); ){ - nVertices++; - } - cellsOnVertex.resize(nVertices); - - dual_cells.close(); - dual_cells.open("triangles.dat"); - - iVtx = 0; - for(std::string line; getline(dual_cells, line); ){ - int start_idx = 0; - int count = 0; - for(int i = 0; i < line.length(); i++){ - count++; - if(line[i] == ' '){ - std::string idx = line.substr(start_idx, count); - - cellsOnVertex.at(iVtx).push_back( atoi(idx.c_str()) ); - - if (atoi(idx.c_str()) >= 0){ - connectivityBase = min(connectivityBase, atoi(idx.c_str())); - } - - count = 0; - start_idx = i; - } - } - - std::string last_idx = line.substr(start_idx); - cellsOnVertex.at(iVtx).push_back( atoi(last_idx.c_str()) ); - - if (atoi(last_idx.c_str()) >= 0){ - connectivityBase = min(connectivityBase, atoi(last_idx.c_str())); - } - - vertexDegree = cellsOnVertex.at(iVtx).size(); - iVtx++; - } - dual_cells.close(); - - meshDensity.clear(); - while(!density.eof()){ - double dens; - - density >> dens; - meshDensity.push_back(dens); - } - density.close(); - - nCells = xCell.size(); - nVertices = cellsOnVertex.size(); - - cout << "Read dimensions:" << endl; - cout << " nCells = " << xCell.size() << endl; - cout << " nVertices = " << cellsOnVertex.size() << endl; - cout << " vertexDegree = " << vertexDegree << 
endl; - cout << " Spherical? = " << spherical << endl; - cout << " Sphere Radius = " << sphereRadius << endl; - cout << " Connectivity base = " << connectivityBase << endl; - - cout << "" << endl; - cout << "X range: " << xRange[0] << " " << xRange[1] << endl; - cout << "Y range: " << yRange[0] << " " << yRange[1] << endl; - cout << "Z range: " << zRange[0] << " " << zRange[1] << endl; - - return 0; -}/*}}}*/ - -int buildVertices(){/*{{{*/ - double x, y, z, norm; - int v1, v2, v3; - - xVertex.clear(); - yVertex.clear(); - zVertex.clear(); - - for(int i = 0; i < cellsOnVertex.size(); i++){ - v1 = cellsOnVertex.at(i).at(0) - connectivityBase; - v2 = cellsOnVertex.at(i).at(1) - connectivityBase; - v3 = cellsOnVertex.at(i).at(2) - connectivityBase; - - if(!isCCW(xCell[v1], yCell[v1], zCell[v1], xCell[v2], yCell[v2], zCell[v2], xCell[v3], yCell[v3], zCell[v3])){ - v2 = cellsOnVertex.at(i).at(2) - connectivityBase; - v3 = cellsOnVertex.at(i).at(1) - connectivityBase; - } - - /* - cout << "Circumcenter of : " << v1 << " " << v2 << " " << v3 << endl; - cout << " 1 - " << xCell[v1] << " " << yCell[v1] << " " << zCell[v1] << endl; - cout << " 2 - " << xCell[v2] << " " << yCell[v2] << " " << zCell[v2] << endl; - cout << " 3 - " << xCell[v3] << " " << yCell[v3] << " " << zCell[v3] << endl; - // */ - - circumcenter(xCell[v1], yCell[v1], zCell[v1], - xCell[v2], yCell[v2], zCell[v2], - xCell[v3], yCell[v3], zCell[v3], - &x, &y, &z); - - if (spherical){ - norm = sqrt( x*x + y*y + z*z ); - x = (x / norm) * sphereRadius; - y = (y / norm) * sphereRadius; - z = (z / norm) * sphereRadius; - } - - xVertex.push_back(x); - yVertex.push_back(y); - zVertex.push_back(z); - } - - return 0; - -}/*}}}*/ -/*}}}*/ - -/* Output functions {{{*/ -int outputGridDimensions( const string outputFilename ){/*{{{*/ - /************************************************************************ - * - * This function writes the grid dimensions to the netcdf file named - * outputFilename - * - * **********************************************************************/ - // Return this code to the OS in case of failure. 
- static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Replace, NULL, 0, NcFile::Offset64Bits); - - int junk; - - nCells = xCell.size(); - - /* - for(vec_int_itr = edgesOnCell.begin(); vec_int_itr != edgesOnCell.end(); ++vec_int_itr){ - maxEdges = std::max(maxEdges, (int)(*vec_int_itr).size()); - }*/ - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - - // define dimensions - NcDim *nCellsDim; - NcDim *nEdgesDim; - NcDim *nVerticesDim; - NcDim *maxEdgesDim; - NcDim *maxEdges2Dim; - NcDim *TWODim; - NcDim *THREEDim; - NcDim *vertexDegreeDim; - NcDim *timeDim; - - // write dimensions - if (!(nCellsDim = grid.add_dim( "nCells", xCell.size()) )) return NC_ERR; - if (!(nVerticesDim = grid.add_dim( "nVertices", xVertex.size()) )) return NC_ERR; - if (!(TWODim = grid.add_dim( "TWO", 2) )) return NC_ERR; - if (!(vertexDegreeDim = grid.add_dim( "vertexDegree", vertexDegree) )) return NC_ERR; - if (!(timeDim = grid.add_dim( "Time") )) return NC_ERR; - - grid.close(); - - // file closed when file obj goes out of scope - return 0; -}/*}}}*/ -int outputGridAttributes( const string outputFilename, const string inputFilename ){/*{{{*/ - /************************************************************************ - * - * This function writes the grid dimensions to the netcdf file named - * outputFilename - * - * **********************************************************************/ - // Return this code to the OS in case of failure. - static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Write); - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - NcBool sphereAtt, radiusAtt; - NcBool history, id, spec, conventions, source, periodic; - string history_str = ""; - string id_str = ""; - string parent_str =""; - - // write attributes - if(!spherical){ - if (!(sphereAtt = grid.add_att( "on_a_sphere", "NO\0"))) return NC_ERR; - if (!(radiusAtt = grid.add_att( "sphere_radius", 1.0))) return NC_ERR; - } else { - if (!(sphereAtt = grid.add_att( "on_a_sphere", "YES\0"))) return NC_ERR; - if (!(radiusAtt = grid.add_att( "sphere_radius", sphereRadius))) return NC_ERR; - } - - history_str += "AsciiToNetCDFPackager.x "; - if(in_history != ""){ - history_str += "\n"; - history_str += in_history; - } - - id_str = gen_random(ID_LEN); - - if (!(history = grid.add_att( "history", history_str.c_str() ))) return NC_ERR; - if (!(conventions = grid.add_att( "Conventions", "MPAS" ))) return NC_ERR; - if (!(source = grid.add_att( "source", "MpasMeshConverter.x" ))) return NC_ERR; - if (!(id = grid.add_att( "file_id", id_str.c_str() ))) return NC_ERR; - if (!(periodic = grid.add_att( "is_periodic", "NO\0" ))) return NC_ERR; - - grid.close(); - - // file closed when file obj goes out of scope - return 0; -}/*}}}*/ -int outputGridCoordinates( const string outputFilename) {/*{{{*/ - /************************************************************************ - * - * This function writes the grid coordinates to the netcdf file named - * outputFilename - * This includes all cell centers, vertices, and edges. 
- * Both cartesian and lat,lon, as well as all of their indices - * - * **********************************************************************/ - // Return this code to the OS in case of failure. - static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Write); - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - - // fetch dimensions - NcDim *nCellsDim = grid.get_dim( "nCells" ); - NcDim *nVerticesDim = grid.get_dim( "nVertices" ); - - int nCells = nCellsDim->size(); - int nVertices = nVerticesDim->size(); - - //Define nc variables - NcVar *xCellVar, *yCellVar, *zCellVar, *xVertexVar, *yVertexVar, *zVertexVar; - - int i; - - // Build and write cell coordinate arrays - cout << "Writing xcell" << endl; - if (!(xCellVar = grid.add_var("xCell", ncDouble, nCellsDim))) return NC_ERR; - if (!xCellVar->put(&xCell[0],nCells)) return NC_ERR; - cout << "Writing ycell" << endl; - if (!(yCellVar = grid.add_var("yCell", ncDouble, nCellsDim))) return NC_ERR; - if (!yCellVar->put(&yCell[0],nCells)) return NC_ERR; - cout << "Writing zcell" << endl; - if (!(zCellVar = grid.add_var("zCell", ncDouble, nCellsDim))) return NC_ERR; - if (!zCellVar->put(&zCell[0],nCells)) return NC_ERR; - - //Build and write vertex coordinate arrays - cout << "Writing xvertex" << endl; - if (!(xVertexVar = grid.add_var("xVertex", ncDouble, nVerticesDim))) return NC_ERR; - if (!xVertexVar->put(&xVertex[0],nVertices)) return NC_ERR; - cout << "Writing yvertex" << endl; - if (!(yVertexVar = grid.add_var("yVertex", ncDouble, nVerticesDim))) return NC_ERR; - if (!yVertexVar->put(&yVertex[0],nVertices)) return NC_ERR; - cout << "Writing zvertex" << endl; - if (!(zVertexVar = grid.add_var("zVertex", ncDouble, nVerticesDim))) return NC_ERR; - if (!zVertexVar->put(&zVertex[0],nVertices)) return NC_ERR; - - grid.close(); - - return 0; -}/*}}}*/ -int outputVertexConnectivity( const string outputFilename) {/*{{{*/ - /***************************************************************** - * - * This function writes all of the *OnVertex arrays. Including - * cellsOnVertex - * - * ***************************************************************/ - // Return this code to the OS in case of failure. 
- static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Write); - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - - // fetch dimensions - NcDim *nVerticesDim = grid.get_dim( "nVertices" ); - NcDim *vertexDegreeDim = grid.get_dim( "vertexDegree" ); - - // define nc variables - NcVar *covVar, *eovVar, *bdryVertVar; - - int nVertices = nVerticesDim->size(); - int vertexDegree = vertexDegreeDim->size(); - int i, j; - - int *tmp_arr; - - // Build and write COV array - tmp_arr = new int[nVertices*vertexDegree]; - - for(i = 0; i < nVertices; i++){ - for(j = 0; j < vertexDegree; j++){ - tmp_arr[i*vertexDegree + j] = 0; - } - } - - i = 0; - for(vec_int_itr = cellsOnVertex.begin(); vec_int_itr != cellsOnVertex.end(); ++vec_int_itr){ - j = 0; - for(int_itr = (*vec_int_itr).begin(); int_itr != (*vec_int_itr).end(); ++int_itr){ - tmp_arr[i*vertexDegree + j] = (*int_itr) - connectivityBase + 1; - j++; - } - i++; - } - - if (!(covVar = grid.add_var("cellsOnVertex", ncInt, nVerticesDim, vertexDegreeDim))) return NC_ERR; - if (!covVar->put(tmp_arr,nVertices,vertexDegree)) return NC_ERR; - - cellsOnVertex.clear(); - - return 0; -}/*}}}*/ -int outputMeshDensity( const string outputFilename) {/*{{{*/ - /*************************************************************************** - * - * This function writes the meshDensity variable. Read in from the file SaveDensity - * - * *************************************************************************/ - // Return this code to the OS in case of failure. - static const int NC_ERR = 2; - - // set error behaviour (matches fortran behaviour) - NcError err(NcError::verbose_nonfatal); - - // open the scvtmesh file - NcFile grid(outputFilename.c_str(), NcFile::Write); - - // check to see if the file was opened - if(!grid.is_valid()) return NC_ERR; - - // fetch dimensions - NcDim *nCellsDim = grid.get_dim( "nCells" ); - - NcVar *cDensVar; - - int nCells = nCellsDim->size(); - int i, j, k; - int junk_int; - double junk_dbl; - - vector dbl_tmp_arr; - - //Write meshDensity - if (!(cDensVar = grid.add_var("meshDensity", ncDouble, nCellsDim))) return NC_ERR; - if (!cDensVar->put(&meshDensity.at(0),nCells)) return NC_ERR; - - return 0; -}/*}}}*/ -/*}}}*/ - -string gen_random(const int len) {/*{{{*/ - static const char alphanum[] = - "0123456789" -// "ABCDEFGHIJKLMNOPQRSTUVWXYZ" - "abcdefghijklmnopqrstuvwxyz"; - - string rand_str = ""; - - for (int i = 0; i < len; ++i) { - rand_str += alphanum[rand() % (sizeof(alphanum) - 1)]; - } - - return rand_str; -}/*}}}*/ - -int circumcenter(double x1, double y1, double z1, double x2, double y2, double z2, double x3, double y3, double z3, double *cx, double *cy, double *cz){/*{{{*/ - - if(spherical){ - double a, b, c, pbc, apc, abp; - double bottom; - double x23, y23, z23; - double x31, y31, z31; - double x12, y12, z12; - - x23 = x2 - x3; - y23 = y2 - y3; - z23 = z2 - z3; - - x31 = x3 - x1; - y31 = y3 - y1; - z31 = z3 - z1; - - x12 = x1 - x2; - y12 = y1 - y2; - z12 = z1 - z2; - - a = pow(x23, 2) + pow(y23, 2) + pow(z23, 2); - b = pow(x31, 2) + pow(y31, 2) + pow(z31, 2); - c = pow(x12, 2) + pow(y12, 2) + pow(z12, 2); -// cout << " ABC: " << a << " " << b << " " << c << endl; - - pbc = a*(-a + b + c); - apc = b*( a - b + c); - abp = c*( a + b - c); - - bottom = pbc + apc + abp; - - *cx = (pbc * x1 + apc * x2 + abp * x3) / bottom; - *cy = (pbc * y1 + apc * y2 + abp 
* y3) / bottom; - *cz = (pbc * z1 + apc * z2 + abp * z3) / bottom; - } else { - double d; - - d = 2.0 * ( x1 * (y2 - y3) + x2 * (y3 - y1) + x3 * (y1 - y2)); - - *cx = (( powf(x1, 2) + powf(y1, 2) ) * (y2 - y3) + ( powf(x2, 2) + powf(y2, 2) ) * (y3 - y1) + ( powf(x3, 2) + powf(y3, 2) ) * (y1 - y2)) / d; - *cy = (( powf(x1, 2) + powf(y1, 2) ) * (x3 - x2) + ( powf(x2, 2) + powf(y2, 2) ) * (x1 - x3) + ( powf(x3, 2) + powf(y3, 2) ) * (x2 - x1)) / d; - *cz = 0.0; - } - - return 0; -}/*}}}*/ - -int isCCW(double x1, double y1, double z1, double x2, double y2, double z2, double x3, double y3, double z3){/*{{{*/ - double nx, ny, nz; - double ux, uy, uz; - double vx, vy, vz; - double cx, cy, cz; - double dot; - - if (spherical){ - nx = x1; - ny = y1; - nz = z1; - } else { - nx = 0.0; - ny = 0.0; - nz = 1.0; - } - - ux = x2 - x1; - uy = y2 - y1; - uz = z2 - z1; - vx = x3 - x1; - vy = y3 - y1; - vz = z3 - z1; - - cx = uy * vz - uz * vy; - cy = uz * vx - ux * vz; - cz = ux * vy - uy * vx; - - dot = cx * nx + cy * ny + cz * nz; - - if (dot > 0.0) { - return 1; - } else { - return 0; - } -}/*}}}*/ diff --git a/grid_gen/basin/.gitignore b/grid_gen/basin/.gitignore deleted file mode 100644 index 2dee6bde4..000000000 --- a/grid_gen/basin/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -# Ignore all netcdf files and graph files -fort* -map -*.mod -*.f90 - -# Ignore all PNGs -*.png - -# Each tool should also have it's own .gitignore file that ignores the build files for that tool. diff --git a/grid_gen/basin/Namelists/namelist.DOME_3D_overflow b/grid_gen/basin/Namelists/namelist.DOME_3D_overflow deleted file mode 100644 index c98ff3bf6..000000000 --- a/grid_gen/basin/Namelists/namelist.DOME_3D_overflow +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 25 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'DOME_3D_overflow' - initial_conditions = 'DOME_3D_overflow' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .true. - top_layers_without_land = 3 - layer_thickness_total_max = 3600.0 - f0 = 1.0e-4 - beta = 0.0 - omega = 0.0 - Lx = 2000.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.Ilicak2_overflow b/grid_gen/basin/Namelists/namelist.Ilicak2_overflow deleted file mode 100644 index ac7547c55..000000000 --- a/grid_gen/basin/Namelists/namelist.Ilicak2_overflow +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 100 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'Ilicak2_overflow' - initial_conditions = 'Ilicak2_overflow' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 2000.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 4.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.Ilicak2_overflow_sigma b/grid_gen/basin/Namelists/namelist.Ilicak2_overflow_sigma deleted file mode 100644 index 8e6bb9c3c..000000000 --- a/grid_gen/basin/Namelists/namelist.Ilicak2_overflow_sigma +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 100 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. 
- zLevel_thickness = 'equally_spaced' - bottom_topography = 'Ilicak2_overflow_sigma' - initial_conditions = 'Ilicak2_overflow_sigma' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_total_max = 2000.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 4.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.SOMA b/grid_gen/basin/Namelists/namelist.SOMA deleted file mode 100644 index 559b41eea..000000000 --- a/grid_gen/basin/Namelists/namelist.SOMA +++ /dev/null @@ -1,31 +0,0 @@ -&basin - nVertLevelsMOD = 40 - on_a_sphere = 'YES' - expand_from_unit_sphere = .true. - sphere_radius = 6.37122e6 - zLevel_thickness = 'SOMA_40_Level' - bottom_topography = 'SOMA_Circular_Basin' - initial_conditions = 'SOMA_TS' - eliminate_inland_seas=.true. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - - ! These variables may be used for acc wind amplification - amplify_acc_wind = .false. - amp_wind_factor = 2.0 - amp_wind_center_lat = -35.0 - amp_wind_spread_lat = 3.0 - - ! These variables are not needed for realistic global topography: - - ! layer_total_max = 2000.0 - ! f0 = -1.1e-4 - ! beta = 1.4e-11 - ! omega = 7.29212e-5 - ! Lx = 3200.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.aquaplanet_one_layer b/grid_gen/basin/Namelists/namelist.aquaplanet_one_layer deleted file mode 100644 index 0d614f412..000000000 --- a/grid_gen/basin/Namelists/namelist.aquaplanet_one_layer +++ /dev/null @@ -1,29 +0,0 @@ -&basin - nVertLevelsMOD = 1 - on_a_sphere = 'YES' - expand_from_unit_sphere = .true. - sphere_radius = 1.0 - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'uniform_TS' - uniform_T = 10.0 - uniform_S = 34.0 - uniform_tracer1 = 1.0 - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_total_max = 3000.0 - - ! These variables are not needed for aquaplanet. - ! u,f,h are initialized in the code. - - f0 = 0.0 - beta = 0.0 - omega = 0.0 - ! Lx = 3200.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.baroclinic_channel_Ilicak4 b/grid_gen/basin/Namelists/namelist.baroclinic_channel_Ilicak4 deleted file mode 100644 index 8815f57fb..000000000 --- a/grid_gen/basin/Namelists/namelist.baroclinic_channel_Ilicak4 +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'baroclinic_channel_Ilicak4' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. 
- top_layers_without_land = 3 - layer_thickness_total_max = 1000.0 - f0 = -1.2e-4 - beta = 0.0 - omega = 0.0 - Lx = 160.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.global_realistic b/grid_gen/basin/Namelists/namelist.global_realistic deleted file mode 100644 index a905b5cc4..000000000 --- a/grid_gen/basin/Namelists/namelist.global_realistic +++ /dev/null @@ -1,31 +0,0 @@ -&basin - nVertLevelsMOD = 40 - on_a_sphere = 'YES' - expand_from_unit_sphere = .true. - sphere_radius = 6.37122e6 - zLevel_thickness = 'POP_40_zLevel' - bottom_topography = 'realistic_ETOPO' - initial_conditions = 'realistic_WOCE' - eliminate_inland_seas=.true. - load_woce_IC = .true. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - - ! These variables may be used for acc wind amplification - ! amplify_acc_wind = .false. - ! amp_wind_factor = 2.0 - ! amp_wind_center_lat = -35.0 - ! amp_wind_spread_lat = 3.0 - - ! These variables are not needed for realistic global topography: - - ! layer_total_max = 2000.0 - ! f0 = -1.1e-4 - ! beta = 1.4e-11 - ! omega = 7.29212e-5 - ! Lx = 3200.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.internal_wave_Ilicak3 b/grid_gen/basin/Namelists/namelist.internal_wave_Ilicak3 deleted file mode 100644 index dcaee8973..000000000 --- a/grid_gen/basin/Namelists/namelist.internal_wave_Ilicak3 +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'internal_wave_Ilicak3' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 500.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 20.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.isopycnal_channel b/grid_gen/basin/Namelists/namelist.isopycnal_channel deleted file mode 100644 index c3139c1f8..000000000 --- a/grid_gen/basin/Namelists/namelist.isopycnal_channel +++ /dev/null @@ -1,21 +0,0 @@ -&basin - nVertLevelsMOD = 3 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'isopycnal_3layer' - bottom_topography = 'flat_bottom' - initial_conditions = 'isopycnal_3layer' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - f0 = -1.1e-4 - beta = 1.4e-11 - omega = 0.0 - Lx = 2048.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.lock_exchange_Ilicak1 b/grid_gen/basin/Namelists/namelist.lock_exchange_Ilicak1 deleted file mode 100644 index 529e8cceb..000000000 --- a/grid_gen/basin/Namelists/namelist.lock_exchange_Ilicak1 +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'lock_exchange_Ilicak1' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. 
- top_layers_without_land = 3 - layer_thickness_total_max = 20.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 4.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test1 b/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test1 deleted file mode 100644 index 679346941..000000000 --- a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test1 +++ /dev/null @@ -1,25 +0,0 @@ -&basin - nVertLevelsMOD = 22 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'sub_ice_shelf_test1' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 1100.0 - f0 = -1.4e-4 - beta = 0.0 - omega = 0.0 - Lx = 50.0e3 - surfaceWindStressMax = 0.0 - iceShelfCavityThickness = 10 - iceShelfEdgeWidth = 15.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test2 b/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test2 deleted file mode 100644 index 78777074d..000000000 --- a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test2 +++ /dev/null @@ -1,25 +0,0 @@ -&basin - nVertLevelsMOD = 22 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'sub_ice_shelf_test2' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 1100.0 - f0 = -1.4e-4 - beta = 0.0 - omega = 0.0 - Lx = 50.0e3 - surfaceWindStressMax = 0.1 - iceShelfCavityThickness = 10 - iceShelfEdgeWidth = 15.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test3 b/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test3 deleted file mode 100644 index 914bafb65..000000000 --- a/grid_gen/basin/Namelists/namelist.sub_ice_shelf_test3 +++ /dev/null @@ -1,25 +0,0 @@ -&basin - nVertLevelsMOD = 22 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'sub_ice_shelf_test3' - initial_conditions = 'sub_ice_shelf_test3' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .true. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 1100.0 - f0 = -1.4e-4 - beta = 0.0 - omega = 0.0 - Lx = 50.0e3 - surfaceWindStressMax = 0.1 - iceShelfCavityThickness = 10 - iceShelfEdgeWidth = 15.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.unitTestCVMixConvection b/grid_gen/basin/Namelists/namelist.unitTestCVMixConvection deleted file mode 100644 index 5ba575b39..000000000 --- a/grid_gen/basin/Namelists/namelist.unitTestCVMixConvection +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'unitTestCVMixConvection' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. 
- solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 1000.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 160.0e3 -/ diff --git a/grid_gen/basin/Namelists/namelist.unitTestCVMixShear b/grid_gen/basin/Namelists/namelist.unitTestCVMixShear deleted file mode 100644 index b375088cf..000000000 --- a/grid_gen/basin/Namelists/namelist.unitTestCVMixShear +++ /dev/null @@ -1,22 +0,0 @@ -&basin - nVertLevelsMOD = 20 - on_a_sphere = 'NO' - sphere_radius = 0.0 - expand_from_unit_sphere = .false. - zLevel_thickness = 'equally_spaced' - bottom_topography = 'flat_bottom' - initial_conditions = 'unitTestCVMixShear' - eliminate_inland_seas=.false. - load_woce_IC = .false. - write_OpenDX_flag = .false. - check_mesh = .true. - cut_domain_from_sphere = .false. - solid_boundary_in_y = .false. - solid_boundary_in_x = .false. - top_layers_without_land = 3 - layer_thickness_total_max = 1000.0 - f0 = 0.0 - beta = 0.0 - omega = 0.0 - Lx = 160.0e3 -/ diff --git a/grid_gen/basin/README b/grid_gen/basin/README deleted file mode 100644 index e64446471..000000000 --- a/grid_gen/basin/README +++ /dev/null @@ -1,24 +0,0 @@ -Program: basin - -This program reads in a MPAS grid file (grid.nc) and outputs a modified grid file -(ocean.nc) and it's associated graph.info files for partitions from 2 up to 1024 in powers of 2. - -The purpose of this code is to remove grid cells from any valid MPAS grid, and to -add initial condition variables like h, u, u_src, forcing, and tracers. -Please see source file src/basin.F to define the specifics of the output grid file. - -The required files are: - grid.nc is an mpas grid, either spherical or Cartesian - namelist.basin may point to a standard case in the namelists directory. - -After grid.nc and namelist.basin files has been placed in this directory, -simply run the script runit. - -This script will compile basin, run basin (producing an ocean.nc file) and use metis -to partition the graph.info file. - -The metis portion of the script requires the executable kmetis to be in your path. -If it is not, none of the graph.info.part.N files will be created, but can later be -created manually using metis and the assocaite graph.info file. - - diff --git a/grid_gen/basin/dx/README b/grid_gen/basin/dx/README deleted file mode 100644 index cb69a3aed..000000000 --- a/grid_gen/basin/dx/README +++ /dev/null @@ -1,14 +0,0 @@ -The dx directory will contain the files for OpenDX visualization after -basin is run. These files include: -h.data -mesh.cfg -mesh.net -ocean.area.data -ocean.dx -ocean.edge.data -ocean.face.data -ocean.loop.data -ocean.position.data -vector.dx -vector.position.data - diff --git a/grid_gen/basin/namelist.basin b/grid_gen/basin/namelist.basin deleted file mode 120000 index 05bd486db..000000000 --- a/grid_gen/basin/namelist.basin +++ /dev/null @@ -1 +0,0 @@ -Namelists/namelist.global_realistic \ No newline at end of file diff --git a/grid_gen/basin/runit b/grid_gen/basin/runit deleted file mode 100755 index 60f647f31..000000000 --- a/grid_gen/basin/runit +++ /dev/null @@ -1,20 +0,0 @@ -rm -f map -cd src -make clean -make -mv map .. -cd .. 
-rm -f graph* -./map -#cd ../metis-4.0 -kmetis graph.info 2 -kmetis graph.info 4 -kmetis graph.info 8 -kmetis graph.info 16 -kmetis graph.info 32 -kmetis graph.info 64 -kmetis graph.info 128 -kmetis graph.info 256 -kmetis graph.info 512 -kmetis graph.info 1024 -#cd ../basin diff --git a/grid_gen/basin/src/Makefile b/grid_gen/basin/src/Makefile deleted file mode 100644 index aade64cf0..000000000 --- a/grid_gen/basin/src/Makefile +++ /dev/null @@ -1,82 +0,0 @@ -# IBM with Xlf compilers -#FC = xlf90 -#CC = xlc -#FFLAGS = -qrealsize=8 -g -C -#CFLAGS = -g -#LDFLAGS = -g -C - -# pgf90 -#FC = pgf90 -#CC = pgcc -#FFLAGS = -r8 -O3 -#CFLAGS = -O3 -#LDFLAGS = -O3 - -## ifort -FC = ifort -CC = icc -FFLAGS = -real-size 64 #-g -traceback -check all -CFLAGS = #-g -LDFLAGS = #-g -traceback -check all - -# gfortran -#FC = gfortran -#CC = gcc -#FFLAGS = -O3 -m64 -ffree-line-length-none -fdefault-real-8 -fconvert=big-endian -ffree-form -#CFLAGS = -#LDFLAGS = - -# absoft -#FC = f90 -#CC = gcc -#FFLAGS = -dp -O3 -#CFLAGS = -O3 -#LDFLAGS = -O3 -#NETCDF = /Users/maltrud/local - - -CPP = cpp -P -traditional -CPPFLAGS = -CPPINCLUDES = -INCLUDES = -I$(NETCDF)/include - -LIBS = -L$(NETCDF)/lib -NCLIB = -lnetcdf -NCLIBF = -lnetcdff -ifneq ($(wildcard $(NETCDF)/lib/libnetcdff.*), ) # CHECK FOR NETCDF4 - LIBS += $(NCLIBF) -endif # CHECK FOR NETCDF4 -LIBS += $(NCLIB) - -RM = rm -f - -########################## - -.SUFFIXES: .F .o - - -OBJS = basin.o \ - utilities.o \ - module_read_netcdf.o \ - module_read_topo.o \ - module_read_TS.o \ - module_read_U.o \ - module_read_monthly.o \ - module_cullLoops.o \ - module_write_netcdf.o - -all: map - -basin.o: utilities.o module_write_netcdf.o module_read_netcdf.o module_read_topo.o module_read_TS.o module_read_U.o module_read_monthly.o module_cullLoops.o - -map: $(OBJS) - $(FC) $(LDFLAGS) -o $@ $(OBJS) $(LIBS) - -clean: - $(RM) *.o *.mod pop *.f90 - -.F.o: - $(RM) $@ $*.mod - $(CPP) $(CPPFLAGS) $(CPPINCLUDES) $< > $*.f90 - $(FC) $(FFLAGS) -c $*.f90 $(INCLUDES) - #$(RM) $*.f90 diff --git a/grid_gen/basin/src/basin.F b/grid_gen/basin/src/basin.F deleted file mode 100644 index 2afcfff80..000000000 --- a/grid_gen/basin/src/basin.F +++ /dev/null @@ -1,3192 +0,0 @@ -program map_to_basin - -use read_netcdf -use read_topo -use read_TS -use read_U -use read_MONTHLY -use write_netcdf -use utilities -use cullLoops - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Program: basin.F -! -! This program is meant to add land to grids, as well as initial conditions. -! -! This program is used to take a specific mesh, and remove Cells from it -! It can be used to change a planar grid into a Channel or a basin grid, or to -! Change a spherical grid into a Limited area spherical grid. -! -! How to use: -! Step 1: Link namelist.basin to the correct namelist file. -! Step 2: Change parameters and flags in namelist file as needed. -! Step 3: Check get_init_conditions routine for initial T&S, thickness, etc. -! Step 4: Check define_kmt routine for bottomDepth and kmt (maxLevelCell) variables. -! Step 5: Check get_dz routine for hZLevel variable. -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -implicit none - -! 
original grid variables -integer :: time, nCells, nEdges, nVertices -integer :: maxEdges, maxEdges2, TWO, vertexDegree, nVertLevels -integer, allocatable, dimension(:) :: indexToCellID, indexToEdgeID, indexToVertexID -real, allocatable, dimension(:) :: xCell, yCell, zCell, latCell, lonCell, meshDensity -real, allocatable, dimension(:) :: xEdge, yEdge, zEdge, latEdge, lonEdge -real, allocatable, dimension(:) :: xVertex, yVertex, zVertex, latVertex, lonVertex -integer, allocatable, dimension(:) :: nEdgesOnCell, nEdgesOnEdge -integer, allocatable, dimension(:,:) :: cellsOnCell, edgesOnCell, verticesOnCell -integer, allocatable, dimension(:,:) :: cellsOnEdge, verticesOnEdge, edgesOnEdge -integer, allocatable, dimension(:,:) :: cellsOnVertex, edgesOnVertex -real, allocatable, dimension(:) :: areaCell, areaTriangle, dcEdge, dvEdge, angleEdge -real, allocatable, dimension(:,:) :: kiteAreasOnVertex, weightsOnEdge - -real, allocatable, dimension(:) :: fCell, fEdge, fVertex, bottomDepth, work1, surfaceWindStress -real, allocatable, dimension(:,:,:) :: normalVelocity, tangentialVelocity, layerThickness -real, allocatable, dimension(:,:,:) :: density - -integer nt_lon, nt_lat, nt_depth -integer nu_lon, nu_lat, nu_depth -real(kind=4), allocatable, dimension(:) :: t_lon, t_lat, depth_t -real(kind=4), allocatable, dimension(:) :: u_lon, u_lat, depth_u -real(kind=4), allocatable, dimension(:,:) :: mTEMP, mSALT -real(kind=4), allocatable, dimension(:,:,:) :: TEMP, SALT -real(kind=4), allocatable, dimension(:,:) :: TAUX, TAUY - -real(kind=4), allocatable, dimension(:,:,:) :: SST_MONTHLY, SSS_MONTHLY -real(kind=4), allocatable, dimension(:,:,:) :: TAUX_MONTHLY, TAUY_MONTHLY - -real, dimension(:), allocatable :: dz -integer :: nMonths = 1 - - real (kind=8) :: ymid, ytmp, ymax, xmid, xloc, yloc, pert, ymin, distance, r, c1(3), c2(3) - real (kind=8) :: latmid, lattmp, latmax, latmin - integer :: cell1, cell2 -real :: eos_linear_alpha, eos_linear_beta, eos_linear_Tref, eos_linear_Sref, eos_linear_densityref - -! 
new grid variables -real, allocatable, dimension(:) :: hZLevel, refBottomDepth -integer :: nCellsNew, nEdgesNew, nVerticesNew -integer :: maxEdgesNew, maxEdges2New, TWONew, vertexDegreeNew, nVertLevelsNew -integer, allocatable, dimension(:) :: indexToCellIDNew, indexToEdgeIDNew, indexToVertexIDNew -real, allocatable, dimension(:) :: xCellNew, yCellNew, zCellNew, latCellNew, lonCellNew, meshDensityNew, meshSpacingNew -real, allocatable, dimension(:) :: xEdgeNew, yEdgeNew, zEdgeNew, latEdgeNew, lonEdgeNew -real, allocatable, dimension(:) :: xVertexNew, yVertexNew, zVertexNew, latVertexNew, lonVertexNew -integer, allocatable, dimension(:) :: nEdgesOnCellNew, nEdgesOnEdgeNew, flipVerticesOnEdgeOrdering -integer, allocatable, dimension(:,:) :: cellsOnCellNew, edgesOnCellNew, verticesOnCellNew -integer, allocatable, dimension(:,:) :: cellsOnEdgeNew, verticesOnEdgeNew, edgesOnEdgeNew -integer, allocatable, dimension(:,:) :: cellsOnVertexNew, edgesOnVertexNew -integer, allocatable, dimension(:,:) :: boundaryEdgeNew, boundaryVertexNew -real, allocatable, dimension(:) :: areaCellNew, areaTriangleNew, dcEdgeNew, dvEdgeNew, angleEdgeNew -real, allocatable, dimension(:,:) :: kiteAreasOnVertexNew, weightsOnEdgeNew, normalsNew - -real, allocatable, dimension(:) :: fCellNew, fEdgeNew, fVertexNew, bottomDepthNew -real, allocatable, dimension(:) :: surfaceWindStressNew -real, allocatable, dimension(:) :: surfaceWindStressNewZonal, surfaceWindStressNewMeridional -real, allocatable, dimension(:,:,:) :: normalVelocityNew, layerThicknessNew -real, allocatable, dimension(:,:,:) :: densityNew, temperatureNew, salinityNew, tracer1New -real, allocatable, dimension(:) :: temperatureRestoreNew, salinityRestoreNew -real, allocatable, dimension(:) :: boundaryLayerDepthNew - -! mapping variables -integer, allocatable, dimension(:) :: kmt, maxLevelCellNew -integer, allocatable, dimension(:) :: cellMap, edgeMap, vertexMap - -! work variables -integer :: i,j,jNew,k,jEdge,jEdgeNew,iVertex1New,iVertex2New,iCell1New,iCell2New -integer :: iCell, iCell1, iCell2, iCell3, iEdge, iVertex, iVertex1, iVertex2 -integer :: iCellNew, iEdgeNew, iVertexNew, ndata, jCell1, jCell2, jCell, iter -real :: xin, yin, zin, ulon, ulat, ux, uy, uz, rlon, rlat, temp_t, temp_s - -integer :: iMonth -character(len=80) :: fileNameT, fileNameS, fileNameU - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! -! Namelist variables -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - -! Variables in namelist file -character (len=32) :: on_a_sphere, zLevel_thickness,bottom_topography, initial_conditions -logical :: expand_from_unit_sphere, eliminate_inland_seas, load_woce_IC, & - write_OpenDX_flag, check_mesh, & - cut_domain_from_sphere, solid_boundary_in_y, solid_boundary_in_x, & - amplify_acc_wind, load_phc_IC - -integer :: nVertLevelsMOD, top_layers_without_land -real (kind=8) :: sphere_radius, layer_thickness_total_max, f0, beta, omega, Lx, & - uniform_T, uniform_S, uniform_tracer1, & - amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat, surfaceWindStressMax, & - iceShelfCavityThickness, iceShelfEdgeWidth - -! 
specify namelist -namelist /basin/ nVertLevelsMOD, on_a_sphere, sphere_radius, & - expand_from_unit_sphere, & - zLevel_thickness, bottom_topography, initial_conditions, & - eliminate_inland_seas, load_woce_IC, load_phc_IC, write_OpenDX_flag, check_mesh, & - cut_domain_from_sphere, solid_boundary_in_y, solid_boundary_in_x, & - top_layers_without_land, layer_thickness_total_max, f0, beta, omega, Lx, & - uniform_T, uniform_S, uniform_tracer1, surfaceWindStressMax, & - amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat, & - iceShelfCavityThickness, iceShelfEdgeWidth - -! Default namelist values. Default set for realistic global IC. -nVertLevelsMOD = 40 -on_a_sphere = 'YES' -sphere_radius = 6.37122e6 -expand_from_unit_sphere = .true. - -! zLevel thickness options: -! 'POP_40_zLevel', 'equally_spaced', 'zero' -zLevel_thickness = 'POP_40_zLevel' - -! bottom topography options: -! 'realistic_ETOPO', 'flat_bottom', 'Ilicak2_overflow', 'DOME_3D_overflow' -bottom_topography = 'realistic_ETOPO' - -! initial temperature and salinity options: -! 'realistic_WOCE', 'realistic_PHC', 'uniform_TS', 'lock_exchange_Ilicak1', 'Ilicak2_overflow', 'Ilicak2_overflow_sigma', 'DOME_3D_overflow', -! 'internal_wave_Ilicak3', 'baroclinic_channel_Ilicak4' -initial_conditions = 'realistic_WOCE' -uniform_T = 10.0 -uniform_S = 34.0 -uniform_tracer1 = 1.0 - -eliminate_inland_seas=.true. -load_woce_IC = .true. -load_phc_IC = .false. -write_OpenDX_flag = .false. -check_mesh = .true. -cut_domain_from_sphere = .false. -solid_boundary_in_y = .false. -solid_boundary_in_x = .false. - -! Set the number of top layers that are not allowed to have land, usually three. -top_layers_without_land = 3 - -layer_thickness_total_max = 2000.0 ! total layer thickness, for equally spaced case -surfaceWindStressMax = 0.1 ! max wind stress, N/m2 -f0 = -1.1e-4 ! Coriolis parameter -beta = 1.4e-11 -omega = 7.29212e-5 ! rotation rate of earth - -! This needs to be changed for correct periodic boundaries -! Lx is the TOTAL domain width, and needs to be exact for correct periodic -! boundaries in x. -Lx = 3200.0e3 ! 40x80km=3200km - -! amplify wind stress in acc, for study with Valis -amplify_acc_wind = .false. -amp_wind_factor = 2.0 -amp_wind_center_lat = -35.0 -amp_wind_spread_lat = 3.0 - -iceShelfCavityThickness = 100 -iceShelfEdgeWidth = 15.0e3 - -! Read in namelist - open(20,file='namelist.basin',status='old') - read(20,basin) - close(20) - -if(load_woce_IC .and. load_phc_IC) then - write(0,*) 'Error. PHC and WOCE cannot both be loaded.' - write(0,*) 'Please edit namelist.basin and update load_woce_IC or load_phc_IC.' - stop -end if - -allocate (hZLevel(nVertLevelsMOD), refBottomDepth(nVertLevelsMOD)) - - -nMonths = 1 - -if (load_woce_IC) then - fileNameT = 'TS/annual/woce_t_ann.3600x2431x42interp.r4.nc' - fileNameS = 'TS/annual/woce_s_ann.3600x2431x42interp.r4.nc' -else if (load_phc_IC) then - fileNameT = 'PHC/PT.01.filled.60levels.nc' - fileNameS = 'PHC/Salt.01.filled.60levels.nc' -end if -fileNameU = 'TS/annual/ws.old_ncep_1958-2000avg.interp3600x2431.nc' - -! get to work -write(6,*) ' starting' -write(6,*) - -! get depth profile for later -write(6,*) ' calling get_dz' -call get_dz - -! get grid -write(6,*) ' calling read_grid' -write(6,*) -call read_grid -write(6,*) ' xCell 1: ',minval(xCell), maxval(xCell) - -! copy dimensions -write(6,*) ' copy dimensions' -write(6,*) -call copy_dimensions -write(6,*) ' xCell 1: ',minval(xCell), maxval(xCell) - -! 
define the kmt array -write(6,*) ' calling define_kmt' -write(6,*) -call define_kmt - -! define the mapping between original and new cells, edges and vertices -write(6,*) ' calling define_mapping' -write(6,*) -call define_mapping - -! copy the vector arrays form the original to new arrays -write(6,*) ' calling map_vectors' -write(6,*) -call map_vectors - -! define the new connectivity variables -write(6,*) ' calling map_connectivity' -write(6,*) -call map_connectivity - -! check the mesh -if (check_mesh) then - call error_checking -endif - -if (load_woce_IC .or. load_phc_IC) then - write(6,*) ' getting woce t and s ' - - call read_TS_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS) - write(6,*) ' TS INIT ', nt_lon, nt_lat, nt_depth - allocate(t_lon(nt_lon), t_lat(nt_lat), depth_t(nt_depth), TEMP(nt_lon,nt_lat,nt_depth), SALT(nt_lon,nt_lat,nt_depth)) - allocate(mTEMP(nt_lat,nt_depth), mSALT(nt_lat,nt_depth)) - call read_TS_fields(t_lon, t_lat, depth_t, TEMP, SALT) - call read_TS_finalize() - - write(6,*) ' getting woce windStress ' - call read_U_init(nu_lon, nu_lat, nu_depth, fileNameU) - write(6,*) ' WINDSTRESS INIT ', nu_lon, nu_lat, nu_depth - allocate(u_lon(nu_lon), u_lat(nu_lat), depth_u(nu_depth)) - allocate(TAUX(nu_lon,nu_lat), TAUY(nu_lon,nu_lat)) - call read_U_fields(u_lon, u_lat, depth_u, TAUX, TAUY) - call read_U_finalize() - - - do k=1,nt_depth - ndata = 0; temp_t=0; temp_s=0 - do j=1,nt_lat - do i=1,nt_lon - if(TEMP(i,j,k).gt.-10.0) then - ndata = ndata + 1 - temp_t = temp_t + TEMP(i,j,k) - temp_s = temp_s + SALT(i,j,k) - endif - enddo - enddo - mTEMP(:,k) = temp_t / float(ndata) - mSALT(:,k) = temp_s / float(ndata) - write(6,*) ndata,mTemp(1,k),mSalt(1,k) - enddo - -endif - -! allocate(SST_MONTHLY(nt_lon,nt_lat,nMonths), SSS_MONTHLY(nt_lon,nt_lat,nMonths)) -! allocate(TAUX_MONTHLY(nt_lon,nt_lat,nMonths), TAUY_MONTHLY(nt_lon,nt_lat,nMonths)) -! SST_MONTHLY=0; SSS_MONTHLY=0; TAUX_MONTHLY=0; TAUY_MONTHLY=0 -! iMonth=1 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.01.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly01.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.01.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=2 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.02.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly02.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.02.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=3 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.03.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly03.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.03.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=4 -! 
fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.04.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly04.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.04.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=5 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.05.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly05.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.05.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=6 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.06.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly06.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.06.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=7 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.07.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly07.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.07.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=8 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.08.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly08.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.08.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=9 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.09.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly09.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.09.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=10 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.10.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly10.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.10.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! 
call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=11 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.11.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly11.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.11.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() -! -! iMonth=12 -! fileNameT='TS/SST.shf.normal_year+Hurrell.monthly.12.interp3600x2431.nc' -! fileNameS='TS/SSS.sfwf.CORE_SSS+precip.monthly12.interp3600x2431.nc' -! fileNameU='TS/TAUIJ.ws.old_ncep_1958-2000avg.mon.12.interp3600x2431.nc' -! call read_MONTHLY_init(nt_lon, nt_lat, nt_depth,fileNameT, fileNameS, fileNameU) -! write(6,*) nt_lon,nt_lat,nt_depth -! call read_MONTHLY_fields(SST_MONTHLY(:,:,iMonth), SSS_MONTHLY(:,:,iMonth), TAUX_MONTHLY(:,:,iMonth), TAUY_MONTHLY(:,:,iMonth)) -! call read_MONTHLY_finalize() - -! generate initial conditions -call get_init_conditions - -! dump new grid to netCDF -write(6,*) ' calling write_grid' -write(6,*) -call write_grid - -! dump graph for partioning -write(6,*) ' call write_graph' -write(6,*) -call write_graph - -! write OpenDx file -if (write_OpenDX_flag) then - write(6,*) ' calling write_OpenDX' - write(6,*) - call write_OpenDX( on_a_sphere, & - nCellsNew, & - nVerticesNew, & - nEdgesNew, & - vertexDegreeNew, & - maxEdgesNew, & - xCellNew, & - yCellNew, & - zCellNew, & - xVertexNew, & - yVertexNew, & - zVertexNew, & - xEdgeNew, & - yEdgeNew, & - zEdgeNew, & - nEdgesOnCellNew, & - verticesOnCellNew, & - verticesOnEdgeNew, & - cellsOnVertexNew, & - edgesOnCellNew, & - areaCellNew, & - maxLevelCellNew, & - meshDensityNew, & - bottomDepthNew, & - temperatureNew(1,1,:), & - kiteAreasOnVertexNew ) -endif - -!do iCell=1,nCellsNew - !ulon = 1.0; ulat = 0.0 - !xin = xCellNew(iCell); yin = yCellNew(iCell); zin = zCellNew(iCell) - !call transform_from_lonlat_to_xyz(xin, yin, zin, ulon, ulat, ux, uy, uz) - !if(abs(ux).lt.1.0e-10) ux=0.0 - !if(abs(uy).lt.1.0e-10) uy=0.0 - !if(abs(uz).lt.1.0e-10) uz=0.0 - !write(20,10) ux, uy, uz - !10 format(3e25.10) -!enddo - -write(6,*) ' finished' - -contains - -subroutine write_graph -implicit none -integer :: m,itmp(maxEdgesNew),k - - m=nEdgesNew - do i=1,nCellsNew - do j=1,nEdgesOnCellNew(i) - if(cellsOnCellNew(j,i).eq.0) m=m-1 - enddo - enddo - - open(42,file='graph.info',form='formatted') - write(42,*) nCellsNew, m - do i=1,nCellsNew - itmp = 0; k = 0; - do j=1,nEdgesOnCellNew(i) - if(cellsOnCellNew(j,i).gt.0) then - k=k+1; itmp(k)=cellsOnCellNew(j,i) - endif - enddo - write(42,'(1x,12i8)',advance='no') (itmp(m),m=1,k) - write(42,'(1x)') - end do - close(42) -end subroutine write_graph - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! -! Step 3: Check get_init_conditions routine for initial T&S, thickness, etc. -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
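The write_graph routine above dumps the culled cell-to-cell connectivity as graph.info, the plain-text graph format read by METIS-style partitioners: a header line with the number of cells and the number of interior edges, then one line of 1-based neighbor cell indices per cell. A sketch of the same output in Python; the function name and the assumption that cellsOnCell is zero-padded are illustrative:

import numpy as np

def write_graph_info(cells_on_cell, path='graph.info'):
    """Write a METIS-style graph.info file from cell-to-cell connectivity.

    cells_on_cell: (nCells, maxEdges) int array of 1-based neighbor cell
    indices, padded with 0 where a neighbor is missing.  Each interior edge
    appears twice, once from each adjacent cell, so half the nonzero count
    is the interior-edge total written in the header.
    """
    n_cells = cells_on_cell.shape[0]
    n_interior_edges = int(np.count_nonzero(cells_on_cell)) // 2
    with open(path, 'w') as f:
        f.write(f'{n_cells} {n_interior_edges}\n')
        for row in cells_on_cell:
            f.write(' '.join(str(int(c)) for c in row[row > 0]) + '\n')

The resulting file can then be fed to a partitioner such as gpmetis to produce the per-processor cell assignment used by MPAS at run time.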
-subroutine get_init_conditions -implicit none -real :: halfwidth, dtr, pi, p(3), q(3), xin, yin, zin, ulon, ulat, stress, n1, n2, distance, r, temp_t, temp_s -real :: dotProd, rho_ref, rho_delta, work, factor, r1, deltay, b -real :: y0_embayment, surfaceTemperature, bottomTemperature, betaTemperature,bottomMinTemp, & - y_a,y_0,A_0, midDepth(nVertLevelsMod), maxMidDepth -real :: bottomSalinity, surfaceSalinity, cavitySalinity, refSalinity, maxDepth, xWindStress, yWindStress -real :: x_0, x_1,x_2,x_3,width,cff1, deltaTemperature -real :: totalSubIceThickness, y1,y2,y3, d1,d2,d3 -real :: lat1, lat2, lat3, lat4, T1, T2 -integer :: iTracer, ix, iy, ndata, i, j, k, ixt, iyt, ncull, jcount, iNoData, kdata(nVertLevelsMod), iMonth, kMax -logical :: flag_lat - -pi = 4.0*atan(1.0) -dtr = pi/180.0 - -! defaults -layerThicknessNew = 100.0 -temperatureNew = 1.0 -salinityNew = 1.0 -tracer1New = 1.0 -normalVelocityNew = 0 -surfaceWindStressNew = 0 -densityNew = 1025.0 - -! initialize boundary layer fields to reasonable values -! specific cases can overwrite as desired -boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - -if (initial_conditions.eq.'uniform_TS') then - - temperatureNew = uniform_T !10.0 - salinityNew = uniform_S !34.0 - tracer1New = uniform_tracer1 !1.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1, nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'gradT_lat') then - -! This is a very simple test case that can test grid connectivity, Coriolis -! force, and pressure gradient on a sphere. It is simply a temperature -! gradient in latitude, which produces two zonal jets. - - salinityNew = uniform_S - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1, nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - - lat1 = 30.0*pi/180.0 - lat2 = 60.0*pi/180.0 - lat3 = 80.0*pi/180.0 - lat4 = 85.0*pi/180.0 - T1 = 100.0 - T2 = 0.0 - do iCell = 1,nCellsNew - if(abs(latCellNew(iCell)) < lat1 ) then - temperatureNew(1,:,iCell) = T1 - elseif(abs(latCellNew(iCell)) < lat2 ) then - temperatureNew(1,:,iCell) = T1 + (T2-T1)*(abs(latCellNew(iCell)) - lat1)/(lat2-lat1) - else - temperatureNew(1,:,iCell) = T2 - endif - - if(abs(latCellNew(iCell)) < lat3 ) then - tracer1New(1,:,iCell) = sin(lonCellNew(iCell)*10) - elseif(abs(latCellNew(iCell)) > lat4 ) then - tracer1New(1,:,iCell) = 0 - else - tracer1New(1,:,iCell) = sin(lonCellNew(iCell)*10) * (abs(latCellNew(iCell)) - lat4)/(lat3-lat4) - endif - - enddo - -elseif (initial_conditions.eq.'lock_exchange_Ilicak1') then - - do i = 1,nCellsNew - if(yCellNew(i) < 32.0e3) then - temperatureNew(1,:,i) = 5.0 - else - temperatureNew(1,:,i) = 30.0 - endif - enddo - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'Ilicak2_overflow') then - - do i = 1,nCellsNew - if(yCellNew(i) < 20000) then - temperatureNew(1,:,i) = 10.0 - else - temperatureNew(1,:,i) = 20.0 - endif - enddo - - salinityNew(1,:,:) = 35.0 - Tracer1New(1,:,:) = 1.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'Ilicak2_overflow_sigma') then - - do i = 1,nCellsNew - if(yCellNew(i) < 20000) then - temperatureNew(1,:,i) = 10.0 
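The gradT_lat case above fills temperature with a piecewise-linear function of latitude only: a warm value T1 equatorward of lat1, a cold value T2 poleward of lat2, and a linear ramp in between, which spins up two zonal jets. The profile as a small Python sketch, with the constants copied from the code above and the function name purely illustrative:

import numpy as np

def grad_t_lat_temperature(lat, lat1=np.deg2rad(30.0), lat2=np.deg2rad(60.0),
                           t1=100.0, t2=0.0):
    """Piecewise-linear temperature versus latitude (radians)."""
    a = np.abs(np.asarray(lat, dtype=float))
    ramp = t1 + (t2 - t1) * (a - lat1) / (lat2 - lat1)
    return np.where(a < lat1, t1, np.where(a < lat2, ramp, t2))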
- else - temperatureNew(1,:,i) = 20.0 - endif - enddo - - salinityNew(1,:,:) = 35.0 - Tracer1New(1,:,:) = 1.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = bottomDepthNew(iCell) / nVertLevelsMOD - enddo - enddo - -elseif (initial_conditions.eq.'internal_wave_Ilicak3') then - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - surfaceTemperature = 20.1 - bottomTemperature = 10.1 - - midDepth(1) = hZLevel(1)/2.0 - do k=2,nVertLevelsMod - midDepth(k) = midDepth(k-1) + 0.5*(hZLevel(k-1) + hZLevel(k)) - enddo - - maxMidDepth = maxval(midDepth) - do k = 1, nVertLevelsMOD - temperatureNew(1,k,:) = (surfaceTemperature - bottomTemperature) & - * ((maxMidDepth - midDepth(k))/maxMidDepth) + bottomTemperature - enddo - - A_0 = 2.0 - y_0 = 125.0e3 - y_a = 50.0e3 - do i = 1, nCellsNew - if ( abs(yCellNew(i) - y_0) < y_a) then - do k = 2, nVertLevelsMOD - betaTemperature = -A_0*cos(0.5*pi*(yCellNew(i)-y_0)/y_a) & - *sin(pi*refBottomDepth(k-1)/(maxMidDepth - midDepth(1))) - temperatureNew(1, k, i) = temperatureNew(1, k, i) + betaTemperature - end do - endif - enddo - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'baroclinic_channel_Ilicak4') then - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - midDepth(1) = hZLevel(1)/2.0 - do k=2,nVertLevelsMod - midDepth(k) = midDepth(k-1) + 0.5*(hZLevel(k-1) + hZLevel(k)) - enddo - - maxMidDepth = maxval(midDepth) - - ! Set up stratification on northern half - surfaceTemperature = 13.1 - bottomTemperature = 10.1 - deltaTemperature = 1.2 - do k = 1, nVertLevelsMOD - temperatureNew(1,k,:) = bottomTemperature & - + (surfaceTemperature - bottomTemperature) * ((-midDepth(k)+maxMidDepth)/maxMidDepth) - enddo - - y_0 = 250.0e3 - x_0 = 0.0e3 - x_1 = 160.0e3 - x_2 = 110.0e3 - x_3 = 130.0e3 - width = 40.0e3 - do i = 1, nCellsNew - ! sine wave defines southern boundary between two temperatures. - cff1 = width * sin (6.0 * 3.141592 * (xCellNew(i) - x_0)/(x_1 - x_0)) - - ! stratification on south half - if( yCellNew(i) < y_0 - cff1 ) then - do k = 1, nVertLevelsMOD - temperatureNew(1,k,i) = temperatureNew(1,k,i) - deltaTemperature - end do - - ! linear interpolation between two halves - else if( yCellNew(i) .ge. y_0 - cff1 .and. yCellNew(i) .le. y_0 - cff1+width) then - do k = 1, nVertLevelsMOD - temperatureNew(1,k,i) = temperatureNew(1,k,i) & - - deltaTemperature*(1.0 -( yCellNew(i) - (y_0 - cff1)) / (1.0 * width)) - end do - endif - enddo - - ! Add an additional perturbation to southern end of third wave. - do i = 1, nCellsNew - cff1 = 0.5 * width * sin(1.0 * 3.141592 * (xCellNew(i) - x_2)/(x_3 - x_2)) - if( yCellNew(i) .ge. y_0 - cff1-0.5*width .and. & - yCellNew(i) .le. y_0 - cff1+0.5*width .and. & - xCellNew(i) .ge. x_2 .and. xCellNew(i) .le. x_3) then - do k = 1, nVertLevelsMOD - temperatureNew(1,k,i) = temperatureNew(1,k,i) + 0.3 * (1.0 - ( (yCellNew(i)-(y_0-cff1))/(0.5*width))) - end do - endif - end do - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'sub_ice_shelf_test1') then - - ! points 1 and 2 are where angles on ice shelf are located. - ! point 3 is at the surface. - ! d variables are total water thickness below ice shelf. 
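Several cases in this routine (internal_wave_Ilicak3 and baroclinic_channel_Ilicak4 above, and the CVMix unit tests below) construct the depth of each layer midpoint from the reference thicknesses with the recurrence midDepth(1) = hZLevel(1)/2 and midDepth(k) = midDepth(k-1) + (hZLevel(k-1) + hZLevel(k))/2. The same computation in vectorized form, as a sketch in which hZLevel is simply a 1-D array of layer thicknesses:

import numpy as np

def layer_mid_depths(h_z_level):
    """Depth of each layer midpoint from a 1-D array of layer thicknesses."""
    h = np.asarray(h_z_level, dtype=float)
    bottom = np.cumsum(h)        # depth of the bottom of each layer
    return bottom - 0.5 * h      # midpoint sits half a layer above the bottom

For example, layer_mid_depths([100.0, 100.0, 200.0]) returns [50., 150., 300.], matching the recurrence above.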
- y1=30.0e3 - y2=60.0e3 - y3=60.0e3+iceShelfEdgeWidth - d1=iceShelfCavityThickness - d2=iceShelfCavityThickness+500 - d3=layer_thickness_total_max - - bottomSalinity = 34.7 - surfaceSalinity = 34.5 - - temperatureNew(1,:,:) = 1.0 - normalVelocityNew = 0.0 - - surfaceWindStressNew = 0.0 - - ! set up sub ice shelf thicknesses - do iCell=1,nCellsNew - if (yCellNew(iCell) < y1 ) then - totalSubIceThickness = d1 - elseif (yCellNew(iCell) < y2 ) then - totalSubIceThickness = d1 + (d2-d1)*(yCellNew(iCell)-y1)/(y2-y1) - elseif (yCellNew(iCell) < y3 ) then - totalSubIceThickness = d2 + (d3-d2)*(yCellNew(iCell)-y2)/(y3-y2) - else - totalSubIceThickness = d3 - endif - layerThicknessNew(1,:,iCell) = totalSubIceThickness/nVertLevelsMOD - - enddo - - ! Set up salinity stratification - do iCell=1,nCellsNew - midDepth(nVertLevelsMod) = layer_thickness_total_max - 0.5*layerThicknessNew(1,nVertLevelsMod,iCell) - do k=nVertLevelsMod-1,1,-1 - midDepth(k) = midDepth(k+1) - 0.5*(layerThicknessNew(1,k+1,iCell)+layerThicknessNew(1,k,iCell)) - enddo - - ! Set up salinity stratification - do k = 1, nVertLevelsMOD - salinityNew(1,k,iCell) = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - enddo - - enddo - -elseif (initial_conditions.eq.'sub_ice_shelf_test2') then - - ! points 1 and 2 are where angles on ice shelf are located. - ! point 3 is at the surface. - ! d variables are total water thickness below ice shelf. - y1=30.0e3 - y2=60.0e3 - y3=60.0e3+iceShelfEdgeWidth - d1=iceShelfCavityThickness - d2=iceShelfCavityThickness+500 - d3=layer_thickness_total_max - - bottomSalinity = 34.7 - surfaceSalinity = 34.5 - cavitySalinity = 34.3 - - temperatureNew(1,:,:) = 1.0 - normalVelocityNew = 0.0 - temperatureRestoreNew(:) = 1.0 - salinityRestoreNew(:) = surfaceSalinity - - ! set up sub ice shelf thicknesses - do iCell=1,nCellsNew - if (yCellNew(iCell) < y1 ) then - totalSubIceThickness = d1 - elseif (yCellNew(iCell) < y2 ) then - totalSubIceThickness = d1 + (d2-d1)*(yCellNew(iCell)-y1)/(y2-y1) - elseif (yCellNew(iCell) < y3 ) then - totalSubIceThickness = d2 + (d3-d2)*(yCellNew(iCell)-y2)/(y3-y2) - else - totalSubIceThickness = d3 - endif - layerThicknessNew(1,:,iCell) = totalSubIceThickness/nVertLevelsMOD - enddo - - ! Set up salinity stratification - do iCell=1,nCellsNew - midDepth(nVertLevelsMod) = layer_thickness_total_max - 0.5*layerThicknessNew(1,nVertLevelsMod,iCell) - do k=nVertLevelsMod-1,1,-1 - midDepth(k) = midDepth(k+1) - 0.5*(layerThicknessNew(1,k+1,iCell)+layerThicknessNew(1,k,iCell)) - enddo - - ! Set up salinity stratification - if (yCellNew(iCell) < y1 ) then - salinityNew(1,:,iCell) = cavitySalinity - elseif (yCellNew(iCell) < y2 ) then - do k = 1, nVertLevelsMOD - ! Salinity of stratified column in open ocean - refSalinity = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - ! linearly interpolate horizontally between cavity and open ocean - salinityNew(1,k,iCell) = cavitySalinity & - + (refSalinity - cavitySalinity) * (yCellNew(iCell) - y1)/(y2 - y1) - enddo - else - do k = 1, nVertLevelsMOD - salinityNew(1,k,iCell) = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - enddo - endif - - enddo - - ! 
set up wind stress - xWindStress = 0.0; - yWindStress = surfaceWindStressMax; - do iEdge=1,nEdgesNew - if (yEdgeNew(iEdge) < y3 ) then - surfaceWindStressNew(iEdge) = 0.0 - else - surfaceWindStressNew(iEdge) = & - xWindStress*cos(angleEdgeNew(iEdge)) & - + yWindStress*sin(angleEdgeNew(iEdge)) - endif - enddo - -elseif (initial_conditions.eq.'sub_ice_shelf_test3') then - - ! points 1 and 2 are where angles on ice shelf are located. - ! point 3 is at the surface. - ! d variables are total water thickness below ice shelf. - y1=30.0e3 - y2=60.0e3 - y3=60.0e3+iceShelfEdgeWidth - d1=iceShelfCavityThickness - d2=iceShelfCavityThickness+500 - d3=layer_thickness_total_max - - bottomSalinity = 34.7 - surfaceSalinity = 34.5 - cavitySalinity = 34.3 - - temperatureNew(1,:,:) = 1.0 - normalVelocityNew = 0.0 - temperatureRestoreNew(:) = 1.0 - salinityRestoreNew(:) = surfaceSalinity - - ! set up sub ice shelf thicknesses - do iCell=1,nCellsNew - if (yCellNew(iCell) < y1 ) then - totalSubIceThickness = d1 - elseif (yCellNew(iCell) < y2 ) then - totalSubIceThickness = d1 + (d2-d1)*(yCellNew(iCell)-y1)/(y2-y1) - elseif (yCellNew(iCell) < y3 ) then - totalSubIceThickness = d2 + (d3-d2)*(yCellNew(iCell)-y2)/(y3-y2) - else - totalSubIceThickness = d3 - endif - ! subtract out the bottom land cells, divide by remaining number of cells. - layerThicknessNew(1,1:maxLevelCellNew(iCell),iCell) = (totalSubIceThickness - (layer_thickness_total_max - bottomDepthNew(iCell))) / maxLevelCellNew(iCell) - layerThicknessNew(1,maxLevelCellNew(iCell)+1:nVertLevelsMod,iCell) = layer_thickness_total_max/nVertLevelsMod - enddo - - ! Set up salinity stratification - do iCell=1,nCellsNew - kMax = maxLevelCellNew(iCell) - midDepth(kMax) = bottomDepthNew(iCell) - 0.5*layerThicknessNew(1,kMax,iCell) - do k=kMax-1,1,-1 - midDepth(k) = midDepth(k+1) - 0.5*(layerThicknessNew(1,k+1,iCell)+layerThicknessNew(1,k,iCell)) - enddo - - ! Set up salinity stratification - if (yCellNew(iCell) < y1 ) then - salinityNew(1,:,iCell) = cavitySalinity - elseif (yCellNew(iCell) < y2 ) then - do k = 1, kMax - ! Salinity of stratified column in open ocean - refSalinity = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - ! linearly interpolate horizontally between cavity and open ocean - salinityNew(1,k,iCell) = cavitySalinity & - + (refSalinity - cavitySalinity) * (yCellNew(iCell) - y1)/(y2 - y1) - enddo - else - do k = 1, kMax - salinityNew(1,k,iCell) = surfaceSalinity & - + (bottomSalinity - surfaceSalinity) * (midDepth(k)/layer_thickness_total_max) - enddo - endif - - enddo - - ! set up wind stress - xWindStress = 0.0; - yWindStress = surfaceWindStressMax; - do iEdge=1,nEdgesNew - if (yEdgeNew(iEdge) < y3 ) then - surfaceWindStressNew(iEdge) = 0.0 - else - surfaceWindStressNew(iEdge) = & - xWindStress*cos(angleEdgeNew(iEdge)) & - + yWindStress*sin(angleEdgeNew(iEdge)) - endif - enddo - - -elseif (initial_conditions.eq.'unitTestCVMixConvection') then - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - midDepth(1) = hZLevel(1)/2.0 - do k=2,nVertLevelsMod - midDepth(k) = midDepth(k-1) + 0.5*(hZLevel(k-1) + hZLevel(k)) - enddo - - maxMidDepth = maxval(midDepth) - - ! 
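In the planar cases above, a prescribed zonal/meridional surface stress is converted to the edge-normal component that MPAS-Ocean stores by projecting onto the edge normal with angleEdge, the angle the edge normal makes with the local eastward direction. A sketch of that projection (names are illustrative):

import numpy as np

def stress_on_edge_normal(tau_x, tau_y, angle_edge):
    """Project a (tau_x, tau_y) stress onto MPAS edge normals.

    angle_edge: 1-D array of the angle (radians) each edge normal makes with
    the local eastward direction.  Returns the edge-normal component.
    """
    return tau_x * np.cos(angle_edge) + tau_y * np.sin(angle_edge)

With tau_x = 0 and tau_y = surfaceWindStressMax this reproduces the expression used in the sub-ice-shelf cases above, where edges under the ice shelf (yEdge < y3) are simply left at zero.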
Set up stratification on northern half - surfaceTemperature = 13.0 - bottomTemperature = 10.0 - do k = 1, nVertLevelsMOD - temperatureNew(1,k,:) = bottomTemperature & - + (surfaceTemperature - bottomTemperature) * ((-midDepth(k)+maxMidDepth)/maxMidDepth) - enddo - - temperatureRestoreNew(:) = surfaceTemperature - 10.0 - salinityRestoreNew(:) = salinityNew(1,1,:) - boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - - do iEdge=1,nEdgesNew - surfaceWindStressNew(iEdge) = 0.001*cos(angleEdgeNew(iEdge)) - enddo - -elseif (initial_conditions.eq.'unitTestCVMixShear') then - - salinityNew(1,:,:) = 35.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - - midDepth(1) = hZLevel(1)/2.0 - do k=2,nVertLevelsMod - midDepth(k) = midDepth(k-1) + 0.5*(hZLevel(k-1) + hZLevel(k)) - enddo - - maxMidDepth = maxval(midDepth) - - ! Set up stratification on northern half - surfaceTemperature = 15.0 - bottomTemperature = 5.0 - do k = 1, nVertLevelsMOD - temperatureNew(1,k,:) = bottomTemperature & - + (surfaceTemperature - bottomTemperature) * ((-midDepth(k)+maxMidDepth)/maxMidDepth) - enddo - - temperatureRestoreNew(:) = surfaceTemperature + 10.0 - salinityRestoreNew(:) = salinityNew(1,1,:) - boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - - do iEdge=1,nEdgesNew - surfaceWindStressNew(iEdge) = 0.10*cos(angleEdgeNew(iEdge)) - enddo - -elseif (initial_conditions.eq.'DOME_3D_overflow') then - - y0_embayment = 600.0e3 ! y location of beginning of embayment - Tracer1New(1,:,:) = 0.0 - normalVelocityNew = 0.0 - surfaceWindStressNew = 0.0 - do k=1,nVertLevelsMOD - temperatureNew(1,k,:) = 20-0.5*k - salinityNew(1,k,:) = 35.0 - enddo - - ! Change embayment fluid to have tracer 1 and cold water. - ! This is only used for a plug of cold water in the initial conditions, - ! rather than forced cold water at the inlet. - !do iCell=1,nCellsNew - ! if (yCellNew(iCell).gt.y0_embayment) then - ! temperatureNew(1,:,iCell) = 0.0 - ! tracer1New(1,:,iCell) = 1.0 - ! endif - !enddo - - do iCell=1,nCellsNew - do k=1,nVertLevelsMod - layerThicknessNew(1,k,iCell) = hZLevel(k) - enddo - enddo - -elseif (initial_conditions.eq.'SOMA_TS') then - - temperatureNew = -99.0 - salinityNew = -99.0 - rho_ref=1000.0 - rho_delta=5.0 - do iCell=1,nCellsNew - do k = 1,maxLevelCellNew(iCell) - layerThicknessNew(1,k,iCell) = hZLevel(k) - if(k.eq.1) r1 = -refBottomDepth(k)/2.0 - if(k.ne.1) r1 = -(refBottomDepth(k)+refBottomDepth(k-1))/2.0 - work = rho_ref - (1.0-0.05)*rho_delta*tanh(r1/300) - 0.05*rho_delta*r1/2500 - densityNew(1,k,iCell) = work - factor = (rho_ref-work)/2.5e-1 - temperatureNew(1,k,iCell) = 20.0 + factor - factor = -r1/1250.0 - salinityNew(1,k,iCell) = 34.0 + factor ! 
salinity - enddo - enddo - - surfaceWindStressNew = 0.0 - do iEdge=1,nEdgesNew - xin = xEdgeNew(iEdge) - yin = yEdgeNew(iEdge) - zin = zEdgeNew(iEdge) - rlon = lonEdgeNew(iEdge) - rlat = latEdgeNew(iEdge) - - b=1.25e6 - deltay = sphere_radius * ( rlat - 35.0*dtr) - factor = 1.0-0.5*deltay/b - r1 = factor * 0.1 * exp( -(deltay/b)**2 ) * cos(pi*deltay/b) - ulon = r1 - ulat = 0.0 - call transform_from_lonlat_to_xyz(xin,yin,zin,ulon,ulat,ux,uy,uz) - if(boundaryEdgeNew(1,iEdge).eq.1) then - surfaceWindStressNew(iEdge) = 0.0 - else - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - p(1) = xCellNew(iCell1); p(2) = yCellNew(iCell1); p(3) = zCellNew(iCell1) - q(1) = xCellNew(iCell2); q(2) = yCellNew(iCell2); q(3) = zCellNew(iCell2) - q = q - p - call unit_vector_in_3space(q) - surfaceWindStressNew(iEdge) = ux*q(1) + uy*q(2) + uz*q(3) - endif - - enddo - - ! set up some restoring in case we want diabatic forcing - ! T gradient is 0.5C per degree - do iCell=1,nCellsNew - rlat = latCellNew(iCell) - temperatureRestoreNew(iCell) = 15.0 - 0.5*(rlat/dtr - 35.0) - salinityRestoreNew(iCell) = salinityNew(1,1,iCell) - enddo - - ! initialized boundary layer fields - boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - -elseif (initial_conditions.eq.'isopycnal_3layer') then - - fCellNew(:) = 0.0 - fEdgeNew(:) = 0.0 - fVertexNew(:) = 0.0 - bottomDepthNew(:) = 0.0 - normalVelocityNew(:,:,:) = 0.0 - - ! setting for three levels - Set h values for isopycnal system - write(6,*) ' setting three levels for isopycnal system' - layerThicknessNew(1,1,:) = 500.0 - layerThicknessNew(1,2,:) = 1250.0 - layerThicknessNew(1,3,:) = 3250.0 - bottomDepthNew(:) = -( layerThicknessNew(1,1,:) + layerThicknessNew(1,2,:) + layerThicknessNew(1,3,:) ) - - ! Noise is meant to make the flow unstable at some point - ! Not needed for all simulations - write(6,*) ' adding noise to layer thickness' - r = 0.0 - do i=1,nCellsNew - work1(i) = float(i) / float(nCellsNew) - call random_number(work1(i)) - r = r + work1(i) - enddo - r = r/float(nCells) - work1(:) = work1(:) - r - layerThicknessNew(1,1,:) = layerThicknessNew(1,1,:) + 1.0*work1(:) - layerThicknessNew(1,2,:) = layerThicknessNew(1,2,:) - 1.0*work1(:) - - ! Specify Density values for isopycnal levels - write(6,*) ' setting density - depricate soon' - densityNew(1,:,:) = 1010.0 - densityNew(1,2,:) = 1011.0 - densityNew(1,3,:) = 1012.0 - - eos_linear_alpha = 2.55e-1 - eos_linear_beta = 7.64e-1 - eos_linear_Tref = 19.0 - eos_linear_Sref = 35.0 - eos_linear_densityref = 1025.022 - - ! set salinity for isopycnal levels - salinityNew = eos_linear_Sref - - ! set temperature for isopycnal levels. Just invert linear eos. - write(6,*) ' setting temperature' - do k=1,nVertLevelsMOD - temperatureNew(1,k,:) = eos_linear_Tref + (eos_linear_densityref - densityNew(1,k,:))/eos_linear_alpha - enddo - - ! 
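The three-layer isopycnal case above assigns a fixed density to each layer and recovers a consistent temperature by inverting the linear equation of state rho = rho_ref - alpha*(T - Tref) + beta*(S - Sref) at S = Sref, giving T = Tref + (rho_ref - rho)/alpha. A worked sketch with the coefficients copied from the code above (the function name is illustrative):

def temperature_from_density(rho, alpha=2.55e-1, t_ref=19.0,
                             rho_ref=1025.022):
    """Invert the linear EOS rho = rho_ref - alpha*(T - t_ref) at S = S_ref."""
    return t_ref + (rho_ref - rho) / alpha

# the three layer densities used above
for rho in (1010.0, 1011.0, 1012.0):
    print(rho, temperature_from_density(rho))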
set forcing for isopycnal levels - write(6,*) 'setting surfaceWindStressNew - wind forcing' - surfaceWindStressNew = 0.0 - if(on_a_sphere.eq.'YES') then - latmin = -60*dtr - latmax = -10*dtr - latmid = -35*dtr - latmin = minval(latEdgeNew) - latmax = maxval(latEdgeNew) - latmid = (latmin+latmax)/2.0 - r = 10.0*dtr - - write(6,*) 'surfaceWindStressNew info', latmin, latmax, latmid, r - do i = 1,nEdgesNew - lattmp = latEdgeNew(i) - iCell1 = cellsOnEdgeNew(1,i) - iCell2 = cellsOnEdgeNew(2,i) - if(iCell1>0.and.iCell2>0) then - pert = surfaceWindStressMax * exp(-(lattmp-latmid)**2/(r**2)) - - ulat = latEdgeNew(i) - ulon = lonEdgeNew(i) + 0.05 - - call convert_lx(xin, yin, zin, 1.0, ulat, ulon) - - xin = xin - xEdgeNew(i) - yin = yin - yEdgeNew(i) - zin = zin - zEdgeNew(i) - - dotProd = sqrt(xin**2 + yin**2 + zin**2) - xin = xin/dotProd - yin = yin/dotProd - zin = zin/dotProd - - dotProd = normalsNew(1,i)*xin + normalsNew(2,i)*yin + normalsNew(3,i)*zin - - surfaceWindStressNew(i) = pert * dotProd - write(8,*) lattmp,pert,dotProd - endif - enddo - else - ymin = minval(yEdgeNew) - ymax = maxval(yEdgeNew) - r = 3.0e5 - ymid = (ymax+ymin)/2 - do i = 1,nEdgesNew - ytmp = yEdgeNew(i) - iCell1 = cellsOnEdgeNew(1,i) - iCell2 = cellsOnEdgeNew(2,i) - if(iCell1>0.and.iCell2>0) then - pert = surfaceWindStressMax * exp(-(ytmp-ymid)**2/(r**2)) - write(8,*) ytmp,pert - surfaceWindStressNew(i) = pert * normalsNew(1,i) - endif - enddo - endif - write(6,*) ' surfaceWindStressNew ', minval(surfaceWindStressNew), maxval(surfaceWindStressNew) - -elseif (initial_conditions.eq.'realistic_PHC') then - -surfaceWindStressNew = 0.0 -surfaceWindStressNew = 0.0 -do iEdge=1,nEdgesNew - xin = xEdgeNew(iEdge) - yin = yEdgeNew(iEdge) - zin = zEdgeNew(iEdge) - rlon = lonEdgeNew(iEdge)/dtr - rlat = latEdgeNew(iEdge)/dtr - ix = nint(rlon/0.1 - 0.05) + nu_lon + 1 - ix = mod(ix,nu_lon)+1 - iy = nu_lat - do jcount=1,nu_lat - if(u_lat(jcount).gt.rlat) then - iy = jcount - exit - endif - enddo - ulon = TAUX(ix,iy) - ulat = TAUY(ix,iy) - !write(6,*) rlon, t_lon(ix), rlat, t_lat(iy) - - call transform_from_lonlat_to_xyz(xin,yin,zin,ulon,ulat,ux,uy,uz) - if(boundaryEdgeNew(1,iEdge).eq.1) then - surfaceWindStressNew(iEdge) = 0.0 - surfaceWindStressNewZonal(iEdge) = 0.0 - surfaceWindStressNewMeridional(iEdge) = 0.0 - else - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - p(1) = xCellNew(iCell1); p(2) = yCellNew(iCell1); p(3) = zCellNew(iCell1) - q(1) = xCellNew(iCell2); q(2) = yCellNew(iCell2); q(3) = zCellNew(iCell2) - q = q - p - call unit_vector_in_3space(q) - surfaceWindStressNew(iEdge) = ux*q(1) + uy*q(2) + uz*q(3) - surfaceWindStressNewZonal(iEdge) = cos(angleEdgeNew(iEdge)) * surfaceWindStressNew(iEdge) - surfaceWindStressNewMeridional(iEdge) = sin(angleEdgeNew(iEdge)) * surfaceWindStressNew(iEdge) - endif - -enddo - - -! 
for acc runs, increase wind strength for Southern Ocean -if (amplify_acc_wind) then - print *, 'amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat' - print *, amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat - do iEdge=1,nEdgesNew - surfaceWindStressNew(iEdge) = surfaceWindStressNew(iEdge) & - * (1.0 + (amp_wind_factor-1.0)*0.5 & - *(1.0-tanh( (latEdgeNew(iEdge)/dtr-amp_wind_center_lat)/amp_wind_spread_lat) ) ) - enddo -endif - -!set tracers at a first guess -temperatureNew = -99.0 -salinityNew = -99.0 -do iCell=1,nCellsNew -do k = 1,maxLevelCellNew(iCell) - temperatureNew(1,k,iCell) = 20.0 - 10.0*k/nVertLevelsMod - salinityNew(1,k,iCell) = 34.0 ! salinity -enddo -enddo - -! update T and S field with PHC data -if( load_phc_IC) then -iNoData = 0 -do iCell=1,nCellsNew - layerThicknessNew(1,:,iCell) = dz(:) - ! if(mod(iCell,100).eq.0) write(6,*) 'load_phc_IC t and s',iCell - rlon = lonCellNew(iCell)/dtr - rlat = latCellNew(iCell)/dtr - do j = 1, nt_lon - if(t_lon(j).gt.rlon) then - ix = j - exit - end if - enddo - iy = nt_lat - do j=1,nt_lat - if(t_lat(j).gt.rlat) then - iy = j - exit - endif - enddo - do k=1,maxLevelCellNew(iCell) - ndata = 0; temp_t = 0; temp_s = 0; kdata(:) = 0 - - ndata = ndata + 1 - temp_t = temp_t + TEMP(ix,iy,k) - temp_s = temp_s + SALT(ix,iy,k) - - if(ndata.gt.0) then - temperatureNew(1,k,iCell) = temp_t / float(ndata) - salinityNEW(1,k,iCell) = temp_s / float(ndata) - kdata(k) = 1 - else - if(k.eq.1) iNoData = iNoData + 1 - if(k.ge.3) then - if(kdata(k-1).eq.1) maxLevelCellNew(iCell) = k-1 - endif - endif - enddo -enddo - -! do a couple of smoothing passes -do iter=1,5 -do iCell=1,nCellsNew -do k=1,maxLevelCellNew(iCell) - ndata=1 - temp_t = temperatureNew(1,k,iCell) - temp_s = salinityNew(1,k,iCell) - do j=1,nEdgesOnCellNew(iCell) - jCell = cellsOnCellNew(j,iCell) - if(jCell.gt.0) then - if(maxLevelCellNew(jCell).ge.k) then - temp_t = temp_t + temperatureNew(1,k,jCell) - temp_s = temp_s + salinityNew(1,k,jCell) - ndata = ndata + 1 - endif - endif - enddo - temperatureNew(1,k,iCell) = temp_t / ndata - salinityNew(1,k,iCell) = temp_s / ndata -enddo -enddo -write(6,*) maxval(temperatureNew(1,1,:)),maxval(salinityNew(1,1,:)) -enddo - -write(6,*) iNoData, nCellsNew - -temperatureRestoreNew(:) = temperatureNew(1,1,:) -salinityRestoreNew(:) = salinityNew(1,1,:) -boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - -endif ! 
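The smoothing passes above replace each cell's temperature and salinity with the average of the cell and its wet neighbors at that level (those whose maxLevelCell reaches the level), repeated a few times, to take the blockiness out of the nearest-neighbor climatology fill. A single-field sketch of such a smoother on the unstructured mesh; the array layout is an assumption, and unlike the Fortran, which sweeps in place, this version updates from a copy on each pass:

import numpy as np

def smooth_on_cells(field, cells_on_cell, n_passes=5):
    """Average each cell with its neighbors, repeated n_passes times.

    field: 1-D array over cells.  cells_on_cell: (nCells, maxEdges) array of
    1-based neighbor indices, 0 where a neighbor is missing.
    """
    field = np.asarray(field, dtype=float).copy()
    for _ in range(n_passes):
        smoothed = field.copy()
        for i, row in enumerate(cells_on_cell):
            neighbors = row[row > 0] - 1      # convert to 0-based indices
            smoothed[i] = (field[i] + field[neighbors].sum()) / (1 + neighbors.size)
        field = smoothed
    return field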
load_phc_IC - -elseif (initial_conditions.eq.'realistic_WOCE') then - -surfaceWindStressNew = 0.0 -surfaceWindStressNew = 0.0 -do iEdge=1,nEdgesNew - xin = xEdgeNew(iEdge) - yin = yEdgeNew(iEdge) - zin = zEdgeNew(iEdge) - rlon = lonEdgeNew(iEdge)/dtr - rlat = latEdgeNew(iEdge)/dtr - ix = nint(rlon/0.1 - 0.05) + nu_lon + 1 - ix = mod(ix,nu_lon)+1 - iy = nu_lat - do jcount=1,nu_lat - if(t_lat(jcount).gt.rlat) then - iy = jcount - exit - endif - enddo - ulon = TAUX(ix,iy) - ulat = TAUY(ix,iy) - !write(6,*) rlon, t_lon(ix), rlat, t_lat(iy) - - call transform_from_lonlat_to_xyz(xin,yin,zin,ulon,ulat,ux,uy,uz) - if(boundaryEdgeNew(1,iEdge).eq.1) then - surfaceWindStressNew(iEdge) = 0.0 - surfaceWindStressNewZonal(iEdge) = 0.0 - surfaceWindStressNewMeridional(iEdge) = 0.0 - else - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - p(1) = xCellNew(iCell1); p(2) = yCellNew(iCell1); p(3) = zCellNew(iCell1) - q(1) = xCellNew(iCell2); q(2) = yCellNew(iCell2); q(3) = zCellNew(iCell2) - q = q - p - call unit_vector_in_3space(q) - surfaceWindStressNew(iEdge) = ux*q(1) + uy*q(2) + uz*q(3) - surfaceWindStressNewZonal(iEdge) = cos(angleEdgeNew(iEdge)) * surfaceWindStressNew(iEdge) - surfaceWindStressNewMeridional(iEdge) = sin(angleEdgeNew(iEdge)) * surfaceWindStressNew(iEdge) - endif - -! if(monthly_forcing) then -! do iMonth=1,nMonths -! ulon = TAUX_MONTHLY(ix,iy,iMonth) -! ulat = TAUY_MONTHLY(ix,iy,iMonth) -! call transform_from_lonlat_to_xyz(xin,yin,zin,ulon,ulat,ux,uy,uz) -! if(boundaryEdgeNew(1,iEdge).eq.1) then -! surfaceWindStressNew(iEdge) = 0.0 -! else -! iCell1 = cellsOnEdgeNew(1,iEdge) -! iCell2 = cellsOnEdgeNew(2,iEdge) -! p(1) = xCellNew(iCell1); p(2) = yCellNew(iCell1); p(3) = zCellNew(iCell1) -! q(1) = xCellNew(iCell2); q(2) = yCellNew(iCell2); q(3) = zCellNew(iCell2) -! q = q - p -! call unit_vector_in_3space(q) -! ! repeat -! surfaceWindStressNew(iEdge) = ux*q(1) + uy*q(2) + uz*q(3) -! endif -! enddo -! else -! surfaceWindStressNew(:) = 0.0 -! end if - -enddo - - -! for acc runs, increase wind strength for Southern Ocean -if (amplify_acc_wind) then - print *, 'amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat' - print *, amplify_acc_wind, amp_wind_factor, amp_wind_center_lat, amp_wind_spread_lat - do iEdge=1,nEdgesNew - surfaceWindStressNew(iEdge) = surfaceWindStressNew(iEdge) & - * (1.0 + (amp_wind_factor-1.0)*0.5 & - *(1.0-tanh( (latEdgeNew(iEdge)/dtr-amp_wind_center_lat)/amp_wind_spread_lat) ) ) - enddo -endif - -!set tracers at a first guess -temperatureNew = -99.0 -salinityNew = -99.0 -do iCell=1,nCellsNew -do k = 1,maxLevelCellNew(iCell) - temperatureNew(1,k,iCell) = 20.0 - 10.0*k/nVertLevelsMod - salinityNew(1,k,iCell) = 34.0 ! salinity -enddo -enddo - -! update T and S field with WOCE data -if(load_woce_IC) then -iNoData = 0 -do iCell=1,nCellsNew - layerThicknessNew(1,:,iCell) = dz(:) - ! if(mod(iCell,100).eq.0) write(6,*) 'load_woce_IC t and s',iCell - rlon = lonCellNew(iCell)/dtr - rlat = latCellNew(iCell)/dtr - ix = nint(rlon/0.1 - 0.05) + nt_lon + 1 - ix = mod(ix,nt_lon)+1 - iy = nt_lat - do j=1,nt_lat - if(t_lat(j).gt.rlat) then - iy = j - exit - endif - enddo - do k=1,maxLevelCellNew(iCell) - ndata = 0; temp_t = 0; temp_s = 0; kdata(:) = 0 - - do i=-15,15 - ixt = ix + 8*i - if(ixt.lt.1) then - ixt = ixt + nt_lon - elseif(ixt.gt.nt_lon) then - ixt = ixt - nt_lon - endif - do j=-15,15 - iyt = iy + 8*j - flag_lat = .true. - if(iyt.lt.1.or.iyt.gt.nt_lat) then - iyt = 1 - flag_lat = .false. 
- endif - if(TEMP(ixt,iyt,k).gt.-10.0.and.flag_lat) then - ndata = ndata + 1 - temp_t = temp_t + TEMP(ixt,iyt,k) - temp_s = temp_s + SALT(ixt,iyt,k) - endif - enddo - enddo - - if(ndata.gt.0) then - temperatureNew(1,k,iCell) = temp_t / float(ndata) - salinityNEW(1,k,iCell) = temp_s / float(ndata) - kdata(k) = 1 - else - if(k.eq.1) iNoData = iNoData + 1 - if(k.ge.3) then - if(kdata(k-1).eq.1) maxLevelCellNew(iCell) = k-1 - endif - endif - enddo -enddo - -! do a couple of smoothing passes -do iter=1,5 -do iCell=1,nCellsNew -do k=1,maxLevelCellNew(iCell) - ndata=1 - temp_t = temperatureNew(1,k,iCell) - temp_s = salinityNew(1,k,iCell) - do j=1,nEdgesOnCellNew(iCell) - jCell = cellsOnCellNew(j,iCell) - if(jCell.gt.0) then - if(maxLevelCellNew(jCell).ge.k) then - temp_t = temp_t + temperatureNew(1,k,jCell) - temp_s = temp_s + salinityNew(1,k,jCell) - ndata = ndata + 1 - endif - endif - enddo - temperatureNew(1,k,iCell) = temp_t / ndata - salinityNew(1,k,iCell) = temp_s / ndata -enddo -enddo -write(6,*) maxval(temperatureNew(1,1,:)),maxval(salinityNew(1,1,:)) -enddo - -write(6,*) iNoData, nCellsNew - -temperatureRestoreNew(:) = temperatureNew(1,1,:) -salinityRestoreNew(:) = salinityNew(1,1,:) -boundaryLayerDepthNew(:) = hZLevel(1) + hZLevel(2) - 1.0e-4 - -!if(monthly_forcing) then - !do iMonth=1,nMonths - !iNoData = 0 - !do iCell=1,nCellsNew - !! if(mod(iCell,100).eq.0) write(6,*) 'load_woce_IC t and s RESTORE',iCell - !rlon = lonCellNew(iCell)/dtr - !rlat = latCellNew(iCell)/dtr - !ix = nint(rlon/0.1 - 0.05) + nt_lon + 1 - !ix = mod(ix,nt_lon)+1 - !iy = nt_lat - !do j=1,nt_lat - !if(t_lat(j).gt.rlat) then - !iy = j - !exit - !endif - !!enddo ! j - !k=1 - !ndata = 0; temp_t = 0; temp_s = 0 - !do i=-15,15 - !ixt = ix + 8*i - !if(ixt.lt.1) then - !ixt = ixt + nt_lon - !elseif(ixt.gt.nt_lon) then - !ixt = ixt - nt_lon - !endif - !do j=-15,15 - !iyt = iy + 8*j - !flag_lat = .true. - !if(iyt.lt.1.or.iyt.gt.nt_lat) then - !iyt = 1 - !flag_lat = .false. - !endif - !if(SST_MONTHLY(ixt,iyt,iMonth).gt.-10.0.and.flag_lat) then - !ndata = ndata + 1 - !temp_t = temp_t + SST_MONTHLY(ixt,iyt,iMonth) - !temp_s = temp_s + SSS_MONTHLY(ixt,iyt,iMonth) - !endif - !enddo !j - !enddo !i - ! - !if(ndata.gt.0) then - !temperatureRestoreMonthlyNew(iMonth,iCell) = temp_t / float(ndata) - !salinityRestoreMonthlyNew(iMonth,iCell) = temp_s / float(ndata) - !else - !temperatureRestoreMonthlyNew(iMonth,iCell) = temperatureNew(1,1,iCell) - !salinityRestoreMonthlyNew(iMonth,iCell) = salinityNew(1,1,iCell) - !endif - ! - !enddo ! iCell - !enddo ! iMonth - ! - !! do a couple of smoothing passes - !do iter=1,5 - !do iCell=1,nCellsNew - !k=1 - !ndata=1 - !temp_t = temperatureRestoreMonthlyNew(iMonth,iCell) - !temp_s = salinityRestoreMonthlyNew(iMonth,iCell) - !do j=1,nEdgesOnCellNew(iCell) - !jCell = cellsOnCellNew(j,iCell) - !if(jCell.gt.0) then - !if(maxLevelCellNew(jCell).ge.k) then - !temp_t = temp_t + temperatureRestoreMonthlyNew(iMonth,iCell) - !temp_s = temp_s + salinityRestoreMonthlyNew(iMonth,iCell) - !ndata = ndata + 1 - !endif - !endif - !enddo ! j - !temperatureRestoreMonthlyNew(iMonth,iCell) = temp_t / ndata - !salinityRestoreMonthlyNew(iMonth,iCell) = temp_s / ndata - !enddo ! iCell - !enddo ! iter -!else -! temperatureRestoreMonthlyNew(:,:) = 0.0 -! salinityRestoreMonthlyNew(:,:) = 0.0 -!end if - -endif ! load_woce_IC - -!repeat -!do iMonth=1,12 -! temperatureRestoreMonthlyNew(iMonth,:) = temperatureRestoreNew(:) -! 
salinityRestoreMonthlyNew(iMonth,:) = salinityRestoreNew(:) -!enddo - -else - - print *, ' Incorrect choice of initial_conditions: ',initial_conditions - stop - -endif ! initial_conditions - - ! set coriolis parameter for grid - write(6,*) ' setting Coriolis parameter' - if(on_a_sphere.eq.'YES') then - do i = 1,nVerticesNew - fVertexNew(i) = 2.0 * omega * sin(latVertexNew(i)) - enddo - - do i = 1,nEdgesNew - fEdgeNew(i) = 2.0 * omega * sin(latEdgeNew(i)) - enddo - - do i = 1,nCellsNew - fCellNew(i) = 2.0 * omega * sin(latCellNew(i)) - enddo - else - do i = 1,nVerticesNew - fVertexNew(i) = f0 + (yVertexNew(i) - ymid) * beta - enddo - - do i = 1,nEdgesNew - fEdgeNew(i) = f0 + (yEdgeNew(i) - ymid) * beta - enddo - - do i = 1,nCellsNew - fCellNew(i) = f0 + (yCellNew(i) - ymid) * beta - enddo - endif - -write(6,*) ' done get_init_conditions' - -end subroutine get_init_conditions - - -subroutine error_checking -real :: p(3), q(3), r(3), angle, s(3), t(3), dot, mindot, maxdot, b(vertexDegree) -real :: work(nCellsNew) - - -! write -write(6,*) -write(6,*) ' error checking ' -write(6,*) - -! check to see if every edge is normal to associated cells -mindot = 2 -maxdot = -2 -do iEdge=1,nEdgesNew - if(boundaryEdgeNew(1,iEdge).eq.1) cycle - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - p(1)=xCellNew(iCell1); p(2)=yCellNew(iCell1); p(3)=zCellNew(iCell1) - q(1)=xCellNew(iCell2); q(2)=yCellNew(iCell2); q(3)=zCellNew(iCell2) - r(1)=xEdgeNew(iEdge); r(2)=yEdgeNew(iEdge); r(3)=zEdgeNew(iEdge) - call unit_vector_in_3space(p) - call unit_vector_in_3space(q) - call unit_vector_in_3space(r) - t = q - p - s = r - p - call unit_vector_in_3space(t) - call unit_vector_in_3space(s) - dot = s(1)*t(1)+s(2)*t(2)+s(3)*t(3) - if(dot.lt.mindot) mindot=dot - if(dot.gt.maxdot) maxdot=dot -enddo -write(6,10) 'alignment of edges and cells (should be ones)', mindot, maxdot -10 format(a60,5x,2e15.5) - -! check to see if every segments connecting cells and vertices are orothogonal' -mindot = 2 -maxdot = -2 -do iEdge=1,nEdgesNew - if(boundaryEdgeNew(1,iEdge).eq.1) cycle - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - iVertex1 = verticesOnEdgeNew(1,iEdge) - iVertex2 = verticesOnEdgeNew(2,iEdge) - p(1)=xCellNew(iCell1); p(2)=yCellNew(iCell1); p(3)=zCellNew(iCell1) - q(1)=xCellNew(iCell2); q(2)=yCellNew(iCell2); q(3)=zCellNew(iCell2) - r(1)=xVertexNew(iVertex1); r(2)=yVertexNew(iVertex1); r(3)=zVertexNew(iVertex1) - s(1)=xVertexNew(iVertex2); s(2)=yVertexNew(iVertex2); s(3)=zVertexNew(iVertex2) - call unit_vector_in_3space(p) - call unit_vector_in_3space(q) - call unit_vector_in_3space(r) - call unit_vector_in_3space(s) - t = q - p - s = s - r - call unit_vector_in_3space(t) - call unit_vector_in_3space(s) - dot = s(1)*t(1)+s(2)*t(2)+s(3)*t(3) - if(dot.lt.mindot) mindot=dot - if(dot.gt.maxdot) maxdot=dot -enddo -write(6,10) 'orthogonality of cell and vertex edges (should be zeros)', mindot, maxdot - -! check that the kiteareas sum to the areatriangle -mindot = 2 -maxdot = -2 -do iVertex=1,nVerticesNew - b = 0 - do i=1,vertexDegree - b(i) = kiteAreasOnVertexNew(i,iVertex) - enddo - angle = sum(b) - if(angle - areaTriangleNew(iVertex).lt.mindot) mindot = angle - areaTriangleNew(iVertex) - if(angle - areaTriangleNew(iVertex).gt.maxdot) maxdot = angle - areaTriangleNew(iVertex) -enddo -write(6,10) ' error in sum of kites and triangles (should be zeroes)', mindot, maxdot - -! 
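The Coriolis fields above are filled from the full expression f = 2*Omega*sin(latitude) when the mesh is on the sphere, and from a beta-plane approximation f = f0 + beta*(y - ymid) on planar meshes. A sketch of both; the value of Omega and the default f0 and beta here are illustrative, not read from the tool's namelist:

import numpy as np

EARTH_OMEGA = 7.292e-5   # Earth's rotation rate in 1/s (illustrative constant)

def coriolis_sphere(lat):
    """f = 2*Omega*sin(lat), with lat in radians."""
    return 2.0 * EARTH_OMEGA * np.sin(lat)

def coriolis_beta_plane(y, y_mid, f0=1.0e-4, beta=1.6e-11):
    """Beta-plane f = f0 + beta*(y - y_mid), with y in meters."""
    return f0 + beta * (y - y_mid)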
check that the kiteareas sum to the areaCell -mindot = 2 -maxdot = -2 -work = 0 -do iVertex=1,nVerticesNew - iCell1 = cellsOnVertexNew(1,iVertex) - iCell2 = cellsOnVertexNew(2,iVertex) - iCell3 = cellsOnVertexNew(3,iVertex) - if(iCell1.ne.0) work(iCell1) = work(iCell1) + kiteAreasOnVertexNew(1,iVertex) - if(iCell2.ne.0) work(iCell2) = work(iCell2) + kiteAreasOnVertexNew(2,iVertex) - if(iCell3.ne.0) work(iCell3) = work(iCell3) + kiteAreasOnVertexNew(3,iVertex) -enddo -mindot = minval(areaCellNew - work) -maxdot = maxval(areaCellNew - work) -write(6,10) ' error in sum of kites and cells (should be zeroes)', mindot, maxdot - -!check for connectivity inverses for cells/edges -do iCell=1,nCellsNew - do i=1,nEdgesOnCellNew(iCell) - iEdge=edgesOnCellNew(i,iCell) - if(iEdge.le.0) stop ' iEdge le 0' - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - if(iCell1.ne.iCell.and.iCell2.ne.iCell) stop ' cells/edges inverse failed' - enddo -enddo -write(6,*) ' cellsOnEdge and edgesOnCell are duals for every cell/edge combination' - -!check for connectivity inverses for cells/vertices -do iCell=1,nCellsNew - do i=1,nEdgesOnCellNew(iCell) - iVertex = verticesOnCellNew(i,iCell) - if(iVertex.le.0) stop ' iVertex le 0' - iCell1 = cellsOnVertexNew(1,iVertex) - iCell2 = cellsOnVertexNew(2,iVertex) - iCell3 = cellsOnVertexNew(3,iVertex) - ! This line may be commented out for quad grids: - if(iCell1.ne.iCell.and.iCell2.ne.iCell.and.iCell3.ne.iCell) stop ' cells/vertices inverse failed' - enddo -enddo -write(6,*) ' cellsOnVertex and verticesOnCell are duals for every cell/vertex combination' - -!check edgesOnEdge -do iEdge=1,nEdgesNew - iCell1 = cellsOnEdgeNew(1,iEdge) - iCell2 = cellsOnEdgeNew(2,iEdge) - if(nEdgesOnEdgeNew(iEdge).eq.0) then - if(boundaryEdgeNew(1,iEdge).ne.1) stop ' stopping boundaryEdgeNew' - endif - do i=1,nEdgesOnEdgeNew(iEdge) - jEdge = edgesOnEdgeNew(i,iEdge) - jCell1 = cellsOnEdgeNew(1,jEdge) - jCell2 = cellsOnEdgeNew(2,jEdge) - if(jCell1.ne.iCell1.and.jCell1.ne.iCell2) then - if(jCell2.ne.iCell1.and.jCell2.ne.iCell2) then - write(6,*) 'error in edgesOnEdge' - write(6,*) iCell1, iCell2, jCell1, jCell2 - stop - endif - endif - enddo -enddo -write(6,*) ' edgesOnEdge is consistent with cellsOnEdge' - -end subroutine error_checking - - -subroutine copy_dimensions - -maxEdgesNew = maxEdges -maxEdges2New = maxEdges2 -TWONew = TWO -vertexDegreeNew = vertexDegree -nVertLevelsNew = nVertLevelsMod - -write(6,*) -write(6,*) ' new dimensions ' -write(6,*) ' maxEdgesNew : ', maxEdgesNew -write(6,*) ' maxEdges2New : ', maxEdges2New -write(6,*) ' TWONew : ', TWONew -write(6,*) ' vertexDegreeNew : ', vertexDegreeNew -write(6,*) ' nVertLevelsNew : ', nVertLevelsNew - -end subroutine copy_dimensions - - - -subroutine read_grid -implicit none - -call read_netcdf_init(nCells, nEdges, nVertices, maxEdges,maxEdges2,& - nVertLevels,TWO,vertexDegree) - -write(6,*) ' init from grid ' -write(6,*) 'nCells :', nCells -write(6,*) 'nEdges :', nEdges -write(6,*) 'nVertices :', nVertices -write(6,*) 'maxEdges :', maxEdges -write(6,*) 'maxEdges2 :', maxEdges2 -write(6,*) 'nVertLevels :', nVertLevels -write(6,*) 'vertexDegree :', vertexDegree -write(6,*) 'TWO :', TWO - -allocate(xCell(nCells)) -allocate(yCell(nCells)) -allocate(zCell(nCells)) -allocate(latCell(nCells)) -allocate(lonCell(nCells)) -allocate(meshDensity(nCells)) -allocate(xEdge(nEdges)) -allocate(yEdge(nEdges)) -allocate(zEdge(nEdges)) -allocate(latEdge(nEdges)) -allocate(lonEdge(nEdges)) -allocate(xVertex(nVertices)) 
-allocate(yVertex(nVertices)) -allocate(zVertex(nVertices)) -allocate(latVertex(nVertices)) -allocate(lonVertex(nVertices)) -allocate(dcEdge(nEdges)) -allocate(dvEdge(nEdges)) - -allocate(indexToCellID(nCells)) -allocate(indexToEdgeID(nEdges)) -allocate(indexToVertexID(nVertices)) - -allocate(cellsOnEdge(TWO,nEdges)) -allocate(nEdgesOnCell(nCells)) -allocate(nEdgesOnEdge(nEdges)) -allocate(edgesOnCell(maxEdges,nCells)) -allocate(edgesOnEdge(maxEdges2,nEdges)) -allocate(weightsOnEdge(maxEdges2,nEdges)) - -allocate(angleEdge(nEdges)) -allocate(areaCell(nCells)) -allocate(areaTriangle(nVertices)) -allocate(cellsOnCell(maxEdges,nCells)) -allocate(verticesOnCell(maxEdges,nCells)) -allocate(verticesOnEdge(TWO,nEdges)) -allocate(edgesOnVertex(vertexDegree,nVertices)) -allocate(cellsOnVertex(vertexDegree,nVertices)) -allocate(kiteAreasOnVertex(vertexDegree,nVertices)) - -allocate(fCell(nEdges)) -allocate(fEdge(nEdges)) -allocate(fVertex(nVertices)) -allocate(bottomDepth(nCells)) -allocate(work1(nCells)) -allocate(normalVelocity(1,nVertLevels,nEdges)) -allocate(surfaceWindStress(nEdges)) -allocate(tangentialVelocity(1,nVertLevels,nEdges)) -allocate(layerThickness(1,nVertLevels,nCells)) -allocate(density(1,nVertLevels,nCells)) - -xCell=0; yCell=0; zCell=0; latCell=0; lonCell=0; meshDensity=1.0 -xEdge=0; yEdge=0; zEdge=0; latEdge=0; lonEdge=0 -xVertex=0; yVertex=0; zVertex=0; latVertex=0; lonVertex=0 - -indexToCellID=0; indexToEdgeID=0; indexToVertexID=0 -cellsOnEdge=0; nEdgesOnCell=0; edgesOnCell=0 -edgesOnEdge=0; weightsOnEdge=0 -angleEdge=0; areaCell=0; areaTriangle=0 -cellsOnCell=0; verticesOnCell=0; verticesOnEdge=0 -edgesOnVertex=0; cellsOnVertex=0; kiteAreasOnVertex=0 - -fEdge=0; fVertex=0; bottomDepth=0; surfaceWindStress=0; work1=0 -normalVelocity=0; tangentialVelocity=0; layerThickness=0; density=0 - - -call read_netcdf_fields( & - time, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - bottomDepth, & - normalVelocity, & - tangentialVelocity, & - layerThickness & - ) - -write(6,*) ' values from read grid, min/max' -write(6,*) ' latCell : ', minval(latCell), maxval(latCell) -write(6,*) ' lonCell : ', minval(lonCell), maxval(lonCell) -write(6,*) ' meshDensity : ', minval(meshDensity),maxval(meshDensity) -write(6,*) ' xCell : ', minval(xCell), maxval(xCell) -write(6,*) ' yCell : ', minval(yCell), maxval(yCell) -write(6,*) ' zCell : ', minval(zCell), maxval(zCell) -write(6,*) ' indexToCellID : ', minval(indexToCellID), maxval(indexToCellID) -write(6,*) ' latEdge : ', minval(latEdge), maxval(latEdge) -write(6,*) ' lonEdge : ', minval(lonEdge), maxval(lonEdge) -write(6,*) ' xEdge : ', minval(xEdge), maxval(xEdge) -write(6,*) ' yEdge : ', minval(yEdge), maxval(yEdge) -write(6,*) ' zEdge : ', minval(zEdge), maxval(zEdge) -write(6,*) ' indexToEdgeID : ', minval(indexToEdgeID), maxval(indexToEdgeID) -write(6,*) ' latVertex : ', minval(latVertex), maxval(latVertex) -write(6,*) ' lonVertex : ', minval(lonVertex), maxval(lonVertex) -write(6,*) ' xVertex : ', minval(xVertex), 
maxval(xVertex) -write(6,*) ' yVertex : ', minval(yVertex), maxval(yVertex) -write(6,*) ' zVertex : ', minval(zVertex), maxval(zVertex) -write(6,*) ' indexToVertexID : ', minval(indexToVertexID), maxval(indexToVertexID) -write(6,*) ' cellsOnEdge : ', minval(cellsOnEdge), maxval(cellsOnEdge) -write(6,*) ' nEdgesOnCell : ', minval(nEdgesOnCell), maxval(nEdgesOnCell) -write(6,*) ' nEdgesOnEdge : ', minval(nEdgesOnEdge), maxval(nEdgesOnEdge) -write(6,*) ' edgesOnCell : ', minval(edgesOnCell), maxval(edgesOnCell) -write(6,*) ' edgesOnEdge : ', minval(edgesOnEdge), maxval(edgesOnEdge) -write(6,*) ' weightsOnEdge : ', minval(weightsOnEdge), maxval(weightsOnEdge) -write(6,*) ' dvEdge : ', minval(dvEdge), maxval(dvEdge) -write(6,*) ' dcEdge : ', minval(dcEdge), maxval(dcEdge) -write(6,*) ' angleEdge : ', minval(angleEdge), maxval(angleEdge) -write(6,*) ' areaCell : ', minval(areaCell), maxval(areaCell) -write(6,*) ' areaTriangle : ', minval(areaTriangle), maxval(areaTriangle) -write(6,*) ' cellsOnCell : ', minval(cellsOnCell), maxval(cellsOnCell) -write(6,*) ' verticesOnCell : ', minval(verticesOnCell), maxval(verticesOnCell) -write(6,*) ' verticesOnEdge : ', minval(verticesOnEdge), maxval(verticesOnEdge) -write(6,*) ' edgesOnVertex : ', minval(edgesOnVertex), maxval(edgesOnVertex) -write(6,*) ' cellsOnVertex : ', minval(cellsOnVertex), maxval(cellsOnVertex) -write(6,*) ' kiteAreasOnVertex : ', minval(kiteAreasOnVertex), maxval(kiteAreasOnVertex) -write(6,*) ' fEdge : ', minval(fEdge), maxval(fEdge) -write(6,*) ' fVertex : ', minval(fVertex), maxval(fVertex) -write(6,*) ' bottomDepth : ', minval(bottomDepth), maxval(bottomDepth) -write(6,*) ' normalVelocity : ', minval(normalVelocity), maxval(normalVelocity) -write(6,*) ' tangentialVelocity : ', minval(tangentialVelocity), maxval(tangentialVelocity) -write(6,*) ' layerThickness : ', minval(layerThickness), maxval(layerThickness) - -end subroutine read_grid - - -subroutine write_grid -implicit none - -if (expand_from_unit_sphere) then - xCellNew = xCellNew * sphere_radius - yCellNew = yCellNew * sphere_radius - zCellNew = zCellNew * sphere_radius - xEdgeNew = xEdgeNew * sphere_radius - yEdgeNew = yEdgeNew * sphere_radius - zEdgeNew = zEdgeNew * sphere_radius - xVertexNew = xVertexNew * sphere_radius - yVertexNew = yVertexNew * sphere_radius - zVertexNew = zVertexNew * sphere_radius - dcEdgeNew = dcEdgeNew * sphere_radius - dvEdgeNew = dvEdgeNew * sphere_radius - areaCellNew = areaCellNew * (sphere_radius)**2 - areaTriangleNew = areaTriangleNew * (sphere_radius)**2 - kiteAreasOnVertexNew = kiteAreasOnVertexNew * (sphere_radius)**2 -endif - -call write_netcdf_init( & - nCellsNew, & - nEdgesNew, & - nVerticesNew, & - maxEdgesNew, & - nVertLevelsNew, & - vertexDegreeNew, & - sphere_radius, & - on_a_sphere & - ) - -call write_netcdf_fields( & - 1, & - latCellNew, & - lonCellNew, & - meshDensityNew, & - xCellNew, & - yCellNew, & - zCellNew, & - indexToCellIDNew, & - latEdgeNew, & - lonEdgeNew, & - xEdgeNew, & - yEdgeNew, & - zEdgeNew, & - indexToEdgeIDNew, & - latVertexNew, & - lonVertexNew, & - xVertexNew, & - yVertexNew, & - zVertexNew, & - indexToVertexIDNew, & - maxLevelCellNew, & - cellsOnEdgeNew, & - nEdgesOnCellNew, & - nEdgesOnEdgeNew, & - edgesOnCellNew, & - edgesOnEdgeNew, & - weightsOnEdgeNew, & - dvEdgeNew, & - dcEdgeNew, & - angleEdgeNew, & - areaCellNew, & - areaTriangleNew, & - cellsOnCellNew, & - verticesOnCellNew, & - verticesOnEdgeNew, & - edgesOnVertexNew, & - cellsOnVertexNew, & - kiteAreasOnVertexNew, & - fEdgeNew, & - fVertexNew, & - 
fCellNew, & - bottomDepthNew, & - boundaryEdgeNew, & - boundaryVertexNew, & - surfaceWindStressNew, & - surfaceWindStressNewZonal, & - surfaceWindStressNewMeridional, & - normalVelocityNew, & - layerThicknessNew, & - densityNew, & - temperatureNew, & - salinityNew, & - tracer1New, & - temperatureRestoreNew, & - salinityRestoreNew, & - boundaryLayerDepthNew, & - refBottomDepth & - ) - -call write_netcdf_finalize - -if (expand_from_unit_sphere) then - xCellNew = xCellNew / sphere_radius - yCellNew = yCellNew / sphere_radius - zCellNew = zCellNew / sphere_radius - xEdgeNew = xEdgeNew / sphere_radius - yEdgeNew = yEdgeNew / sphere_radius - zEdgeNew = zEdgeNew / sphere_radius - xVertexNew = xVertexNew / sphere_radius - yVertexNew = yVertexNew / sphere_radius - zVertexNew = zVertexNew / sphere_radius - dcEdgeNew = dcEdgeNew / sphere_radius - dvEdgeNew = dvEdgeNew / sphere_radius - areaCellNew = areaCellNew / (sphere_radius)**2 - areaTriangleNew = areaTriangleNew / (sphere_radius)**2 - kiteAreasOnVertexNew = kiteAreasOnVertexNew / (sphere_radius)**2 -endif - -end subroutine write_grid - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! -! Step 4: Check define_kmt routine for bottomDepth and kmt (maxLevelCell) variables -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine define_kmt -implicit none -real (kind=4), allocatable, dimension(:) :: x,y, work_kmt -real (kind=4), allocatable, dimension(:,:) :: ztopo -integer :: nx, ny, inx, iny, ix, iy, kmt_neighbor_max -integer :: kEmbayment -real :: pi, dtr, zdata, rlon, rlat, r, ymin, ymax, xmin, xmax -real :: latmin, latmax, lonmin, lonmax, ridgeDepth, maxdc -real :: b, H0, phi, gamma, distance, rlonC, rlatC -real :: y0_embayment, embayment_depth, embayment_slope, z -logical :: flag, kmt_flag -real :: y1,y2,y3, d1,d2,d3 - -pi = 4.0*atan(1.0) -dtr = pi / 180.0 - -allocate(kmt(nCells)) -kmt = 0 - -if (bottom_topography.eq.'realistic_ETOPO') then - - nx = 10800 - ny = 5400 - allocate(x(nx)) - allocate(y(ny)) - allocate(ztopo(nx,ny)) - x = 0.0 - y = 0.0 - ztopo = 0.0 - write(6,*) ' ztopo ', minval(ztopo), maxval(ztopo) - call read_topo_init( inx, iny) - if(inx.ne.nx) stop ' nx topo' - if(iny.ne.ny) stop ' ny topo' - call read_topo_fields(x,y,ztopo) - call read_topo_finalize() - write(6,*) minval(x), maxval(x), x(1) - write(6,*) minval(y), maxval(y), y(1) - write(6,*) minval(ztopo), maxval(ztopo) - - do iCell=1,nCells - - ! Convert from radians to degrees - rlon = lonCell(iCell) / dtr - rlat = latCell(iCell) / dtr - - ! Find nearest coordinate in topo file. - ! This is 1/30th degree topo data, so multiply degrees by 30 and - ! round to get index. - ix = nint((rlon+180)*30) + nx - ix = mod(ix,nx)+1 - iy = nint((rlat+90 )*30) - ix = max(1,ix); ix = min(nx,ix) - iy = max(1,iy); iy = min(ny,iy) - - zdata = ztopo(ix,iy) - - ! zdata is less than zero for ocean points. - if(zdata.lt.0.0) then - zdata = -zdata - bottomDepth(iCell) = zdata - r = 0 - kmt_flag=.false. - do k=1,nVertLevelsMod - if(.not.kmt_flag) then - r = r + dz(k) - if(r.gt.zdata) then - kmt(iCell) = k - kmt_flag = .true. - endif - endif - enddo - - ! zdata is deeper than deepest cell - if (kmt(iCell).eq.0) then - kmt(iCell)=nVertLevelsMod - bottomDepth(iCell) = refBottomDepth(nVertLevelsMod) - endif - - !write(6,*) kmt(iCell) - - endif - - ! 
if(zdata.lt.0.0) kmt(iCell) = nVertLevelsMod - - enddo - - deallocate(x) - deallocate(y) - deallocate(ztopo) - -elseif (bottom_topography.eq.'Ilicak2_overflow') then - - kmt = nVertLevelsMOD - - ridgeDepth = 500.0 - do iCell = 1,nCells - ! From Mehmet Ilicak: - ! depth=2000 - ! val1 = 500 is top of ridge - ! h(i,j) = val1 + 0.5*(depth-val1) * (1.0+TANH((lon(i,j)-40000.0)/7000.0)) - bottomDepth(iCell) = ridgeDepth + 0.5*(layer_thickness_total_max-ridgeDepth) * (1.0+tanh((yCell(iCell)-40000.0)/7000.0)) - - if (bottomDepth(iCell).lt.0.0.or. & - bottomDepth(iCell).gt.refBottomDepth(nVertLevelsMOD)) then - print *, 'error: bottomDepth cannot be less than zero or greater than refBottomDepth(nVertLevels)' - print *, 'iCell, bottomDepth(iCell):', iCell, bottomDepth(iCell) - exit - end if - - do k=1,nVertLevelsMOD - if (bottomDepth(iCell).le.refBottomDepth(k)) then - kmt(iCell) = k - exit - endif - end do - - enddo - -elseif (bottom_topography.eq.'Ilicak2_overflow_sigma') then - - ridgeDepth = 500.0 - do iCell = 1,nCells - ! From Mehmet Ilicak: - ! depth=2000 - ! val1 = 500 is top of ridge - ! h(i,j) = val1 + 0.5*(depth-val1) * (1.0+TANH((lon(i,j)-40000.0)/7000.0)) - bottomDepth(iCell) = ridgeDepth + 0.5*(layer_thickness_total_max-ridgeDepth) * (1.0+tanh((yCell(iCell)-40000.0)/7000.0)) - - if (bottomDepth(iCell).lt.0.0.or. & - bottomDepth(iCell).gt.refBottomDepth(nVertLevelsMOD)) then - print *, 'error: bottomDepth cannot be less than zero or greater than refBottomDepth(nVertLevels)' - print *, 'iCell, bottomDepth(iCell):', iCell, bottomDepth(iCell) - exit - end if - - enddo - - ! for sigma coordinates, set kmt to the max level. - kmt = nVertLevelsMOD - -elseif (bottom_topography.eq.'sub_ice_shelf_test3') then - - ! points 1 and 2 are where angles on ice shelf are located. - ! point 3 is at the surface. - ! d variables are total water thickness below ice shelf. - y1= 30.0e3 - y2= 90.0e3 - y3=150.0e3 - d1=refBottomDepth(nVertLevelsMOD) - d2=refBottomDepth(nVertLevelsMOD) - 250.0 - d3=refBottomDepth(nVertLevelsMOD) - - ! set up sub ice shelf thicknesses - do iCell=1,nCells - if (yCell(iCell) < y1 .or. yCell(iCell) > y3) then - kmt(iCell) = nVertLevelsMOD - bottomDepth(iCell) = refBottomDepth(nVertLevelsMOD) - else - if (yCell(iCell) < y2 ) then - bottomDepth(iCell) = d1 + (d2-d1)*(yCell(iCell)-y1)/(y2-y1) - else - bottomDepth(iCell) = d2 + (d3-d2)*(yCell(iCell)-y2)/(y3-y2) - endif - - do k=1,nVertLevelsMOD - if (bottomDepth(iCell).le.refBottomDepth(k)) then - kmt(iCell) = k - exit - endif - end do - ! reset bottomDepth to it's full cell value - bottomDepth(iCell) = refBottomDepth(kmt(iCell)) - endif - - - enddo - -elseif (bottom_topography.eq.'DOME_3D_overflow') then - - y0_embayment = 600.0e3 ! y location of beginning of embayment - embayment_depth =600 ! depth of embayment, m - embayment_slope = 0.01 ! slope - - ! The first level where z is deeper than zmid, set k-level of embayment - do k=2,nVertLevelsMod - if ((refBottomDepth(k)+refBottomDepth(k-1))/2.0.gt.embayment_depth) then - kEmbayment = k - exit - endif - enddo - print *, 'kEmbayment',kEmbayment - do iCell=1,nCells - if (yCell(iCell).gt.y0_embayment) then - ! note: I made embayment 200km wide for the 50km rez hex case, so - ! it is always wider than 1 cell. For higher rez, change this - ! to 1700 to 1800. - if (xCell(iCell).lt.1600.0e3.or. & - xCell(iCell).gt.1800.0e3 ) then - kmt(iCell)=0 - else - kmt(iCell) = kEmbayment - endif - bottomDepth(iCell) = embayment_depth - - else - ! 
compute depth based on y coordinate - bottomDepth(iCell) = min(embayment_depth + embayment_slope*(y0_embayment - yCell(iCell)),refBottomDepth(nVertLevelsMOD)) - ! default depth is - kmt(iCell) = nVertLevelsMod - do k=2,nVertLevelsMod - ! The first level where z is deeper than zmid, set kmt. - if ((refBottomDepth(k)+refBottomDepth(k-1))/2.0.gt.bottomDepth(iCell)) then - kmt(iCell) = k - exit - endif - enddo - endif - - ! If using no partial bottom cells, use this: - if (kmt(iCell).gt.0) then - bottomDepth(iCell) = refBottomDepth(kmt(iCell)) - endif - - enddo - - - -elseif (bottom_topography.eq.'SOMA_Circular_Basin') then - - pi = 4.0*atan(1.0) - dtr = pi / 180.0 - b = 1.25e6 - phi = 0.1 - H0 = 2500.0 - gamma = -0.4 - - kmt = 0 - - do iCell=1,nCells - rlon = lonCell(iCell) - rlat = latCell(iCell) - rlonC = 0.0 - rlatC = 35.0*dtr - - r = sqrt( sin(0.5*(rlatC-rlat))**2 + & - cos(rlat)*cos(rlatC)*sin(0.5*(rlonC-rlon))**2 ) - distance = 2.*sphere_radius*asin(r) - - r = 1.0 - distance**2 / b**2 - - if(r.gt.gamma) then - zdata = -100.0 - (H0-100.0)/2.0 * (1.0 + tanh(r/phi)) - else - zdata = 100.0 - endif - bottomDepth(iCell) = -zdata - - - if(zdata.lt.0.0) then - zdata = -zdata - r = 0 - kmt_flag=.false. - do k=1,nVertLevelsMod - if(.not.kmt_flag) then - r = r + dz(k) - if(r.gt.zdata) then - kmt(iCell) = k - bottomDepth(iCell) = refBottomDepth(k) - kmt_flag = .true. - endif - endif - enddo - if(kmt(iCell).eq.0) kmt(iCell)=nVertLevelsMod - endif - - enddo - - -elseif (bottom_topography.eq.'flat_bottom') then - - kmt = nVertLevelsMOD - bottomDepth = refBottomDepth(nVertLevelsMOD) - -else - - print *, ' Incorrect choice of bottom_topography: ',bottom_topography - stop - -endif - -if (cut_domain_from_sphere) then - latmin = -30*dtr - latmax = +30*dtr - lonmin = +10*dtr - lonmax = +70*dtr - write(6,*) ' lat min ', latmin - write(6,*) ' lat max ', latmax - where(latCell.lt.latmin) kmt = 0 - where(latCell.gt.latmax) kmt = 0 - where(lonCell.lt.lonmin) kmt = 0 - where(lonCell.gt.lonmax) kmt = 0 -endif - -if (solid_boundary_in_y) then - ymin = minval(yCell) - write(6,*) ' minimum yCell ', ymin - ymax = maxval(yCell) - write(6,*) ' maximum yCell ', ymax - where(yCell.lt.1.001*ymin) kmt = 0 - where(yCell.gt.0.999*ymax) kmt = 0 -endif - -if (solid_boundary_in_x) then - maxdc = maxval(dcEdge) - xmin = minval(xCell) - write(6,*) ' minimum xCell ', xmin - xmax = maxval(xCell) - write(6,*) ' maximum xCell ', xmax - where(xCell.lt.xmin+maxdc/1.5) kmt = 0 - where(xCell.gt.xmax-maxdc/1.5) kmt = 0 -endif - - -allocate(work_kmt(nCells)) -work_kmt = 0.0 -where(kmt.eq.0) work_kmt=1.0 -write(6,*) 'number of cells culled ',sum(work_kmt) -deallocate(work_kmt) - - -! Eliminate isolated ocean cells, and make these isolated deep cells -! flush with the deepest neighbor. -do iCell=1,nCells - kmt_neighbor_max = 0 - do j=1,nEdgesOnCell(iCell) - iCell1 = cellsOnCell(j,iCell) - kmt_neighbor_max = max(kmt_neighbor_max,kmt(iCell1)) - enddo - if (kmt(iCell).gt.kmt_neighbor_max) then - kmt(iCell) = kmt_neighbor_max - bottomDepth(iCell) = refBottomDepth(kmt(iCell)) - endif -enddo - -if(eliminate_inland_seas) then -call eliminateLoops(nCells,nEdges,nVertices,maxEdges,vertexDegree, & - nEdgesOnCell, cellsOnCell, verticesOnEdge, cellsOnVertex, edgesOnCell, lonCell, latCell, & - xCell, yCell, zCell, xEdge, yEdge, zEdge, xVertex, yVertex, zVertex, & - KMT) -endif - -! 
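Throughout define_kmt above, the deepest active level of a cell (kmt, written to the output file as maxLevelCell) is the first level whose reference bottom depth reaches the cell's bottom depth, with columns deeper than the deepest reference level clamped to the last level and land cells left at zero. The same lookup vectorized over cells, as a sketch in which refBottomDepth is the 1-D array of cumulative layer bottom depths:

import numpy as np

def compute_kmt(bottom_depth, ref_bottom_depth):
    """1-based index of the first level with refBottomDepth >= bottomDepth.

    bottom_depth: 1-D array over cells (positive down, <= 0 means land).
    Cells deeper than the last reference level are clamped to that level.
    """
    ref = np.asarray(ref_bottom_depth, dtype=float)
    depth = np.asarray(bottom_depth, dtype=float)
    kmt = np.searchsorted(ref, depth, side='left') + 1   # 1-based level index
    kmt = np.minimum(kmt, ref.size)                      # clamp deep columns
    kmt[depth <= 0.0] = 0                                # land cells stay culled
    return kmt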
do not allow land or PBCs in top layers -k = min(top_layers_without_land,nVertLevelsMOD) -where(kmt.gt.0.and.kmt.le.k) - bottomDepth = refBottomDepth(k) - kmt=k -endwhere - -end subroutine define_kmt - - - -subroutine define_mapping -implicit none - -allocate(cellMap(nCells)) -allocate(edgeMap(nEdges)) -allocate(vertexMap(nVertices)) -cellMap = 0; edgeMap = 0; vertexMap = 0 - -j=1 -do i=1,nCells -if(kmt(i).ne.0) then - cellMap(i) = j - j=j+1 -endif -write(10,*) i, cellMap(i) -enddo - -j=1 -do i=1,nEdges -iCell1 = cellsOnEdge(1,i) -iCell2 = cellsOnEdge(2,i) -if(kmt(iCell1).ne.0.or.kmt(iCell2).ne.0) then - edgeMap(i)=j - j=j+1 -endif -write(11,*) i,edgeMap(i) -enddo - -j=1 -do i=1,nVertices -iCell1 = cellsOnVertex(1,i) -iCell2 = cellsOnVertex(2,i) -iCell3 = cellsOnVertex(3,i) -if(kmt(iCell1).ne.0.or.kmt(iCell2).ne.0.or.kmt(iCell3).ne.0) then - vertexMap(i)=j - j=j+1 -endif -write(12,*) i,vertexMap(i) -enddo - -nCellsNew = 0 -do i=1,nCells -if(cellMap(i).ne.0) nCellsNew = nCellsNew + 1 -enddo - -nEdgesNew = 0 -do i=1,nEdges -if(edgeMap(i).ne.0) nEdgesNew = nEdgesNew + 1 -enddo - -nVerticesNew = 0 -do i=1,nVertices -if(vertexMap(i).ne.0) nVerticesNew = nVerticesNew + 1 -enddo - -write(6,*) ' mesh mapping found ' -write(6,*) nCells, nCellsNew -write(6,*) nEdges, nEdgesNew -write(6,*) nVertices, nVerticesNew - -allocate(indexToCellIDNew(nCellsNew)) -allocate(indexToEdgeIDNew(nEdgesNew)) -allocate(indexToVertexIDNew(nVerticesNew)) -indextoCellIDNew = 0; indexToEdgeIDNew = 0; indexToVertexIDNew = 0 - -do i=1,nCellsNew -indexToCellIDNew(i)=i -enddo - -do i=1,nEdgesNew -indexToEdgeIDNew(i)=i -enddo - -do i=1,nVerticesNew -indexToVertexIDNew(i)=i -enddo - -end subroutine define_mapping - - -subroutine map_vectors -implicit none - -allocate(xCellNew(nCellsNew)) -allocate(yCellNew(nCellsNew)) -allocate(zCellNew(nCellsNew)) -allocate(normalsNew(3,nEdgesNew)) -allocate(latCellNew(nCellsNew)) -allocate(lonCellNew(nCellsNew)) -allocate(meshDensityNew(nCellsNew)) -allocate(meshSpacingNew(nCellsNew)) -allocate(xEdgeNew(nEdgesNew)) -allocate(yEdgeNew(nEdgesNew)) -allocate(zEdgeNew(nEdgesNew)) -allocate(latEdgeNew(nEdgesNew)) -allocate(lonEdgeNew(nEdgesNew)) -allocate(xVertexNew(nVerticesNew)) -allocate(yVertexNew(nVerticesNew)) -allocate(zVertexNew(nVerticesNew)) -allocate(latVertexNew(nVerticesNew)) -allocate(lonVertexNew(nVerticesNew)) -allocate(dcEdgeNew(nEdgesNew)) -allocate(dvEdgeNew(nEdgesNew)) -allocate(angleEdgeNew(nEdgesNew)) -allocate(areaCellNew(nCellsNew)) -allocate(areaTriangleNew(nVerticesNew)) -allocate(maxLevelCellNew(nCellsNew)) - -allocate(fCellNew(nEdgesNew)) -allocate(fEdgeNew(nEdgesNew)) -allocate(fVertexNew(nVerticesNew)) -allocate(bottomDepthNew(nCellsNew)) -allocate(surfaceWindStressNew(nEdgesNew)) -allocate(surfaceWindStressNewZonal(nEdgesNew)) -allocate(surfaceWindStressNewMeridional(nEdgesNew)) -allocate(normalVelocityNew(1,nVertLevelsNew,nEdgesNew)) -allocate(layerThicknessNew(1,nVertLevelsNew,nCellsNew)) -allocate(densityNew(1,nVertLevelsNew,nCellsNew)) -allocate(temperatureNew(1,nVertLevelsNew,nCellsNew)) -allocate(salinityNew(1,nVertLevelsNew,nCellsNew)) -allocate(tracer1New(1,nVertLevelsNew,nCellsNew)) - -allocate(temperatureRestoreNew(nCellsNew)) -allocate(salinityRestoreNew(nCellsNew)) -allocate(boundaryLayerDepthNew(nCellsNew)) - -xCellNew=0; yCellNew=0; zCellNew=0; latCellNew=0; lonCellNew=0; meshDensityNew=1.0; meshSpacingNew=0.0 -xEdgeNew=0; yEdgeNew=0; zEdgeNew=0; latEdgeNew=0; lonEdgeNew=0 -xVertexNew=0; yVertexNew=0; zVertexNew=0; latVertexNew=0; lonVertexNew=0 - 
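
The define_mapping subroutine above compacts the mesh after culling: every cell with kmt /= 0 gets a new contiguous 1-based index, an edge is kept when at least one of its two neighbouring cells is kept, and a vertex is kept when any of its surrounding cells is kept. A minimal Python sketch of that renumbering, assuming NumPy and using hypothetical names (this is not part of the MPAS-Tools API, just an illustration of the removed Fortran logic):

    import numpy as np

    def compact_indices(kmt, cells_on_edge, cells_on_vertex):
        """kmt: (nCells,) with 0 marking land.
        cells_on_edge: (2, nEdges), cells_on_vertex: (vertexDegree, nVertices),
        both 1-based cell indices with 0 meaning 'no cell'."""
        ocean = kmt != 0
        cell_map = np.zeros(kmt.size, dtype=int)
        cell_map[ocean] = np.arange(1, int(ocean.sum()) + 1)   # new 1-based cell IDs

        def is_ocean(idx):
            # True where a (possibly zero) 1-based cell index points at an ocean cell
            return (idx > 0) & ocean[np.maximum(idx, 1) - 1]

        edge_keep = is_ocean(cells_on_edge).any(axis=0)        # >= 1 ocean neighbour
        edge_map = np.zeros(edge_keep.size, dtype=int)
        edge_map[edge_keep] = np.arange(1, int(edge_keep.sum()) + 1)

        vertex_keep = is_ocean(cells_on_vertex).any(axis=0)    # any surrounding ocean cell
        vertex_map = np.zeros(vertex_keep.size, dtype=int)
        vertex_map[vertex_keep] = np.arange(1, int(vertex_keep.sum()) + 1)

        return cell_map, edge_map, vertex_map

Keeping 0 as the sentinel for "culled" mirrors the Fortran cellMap, edgeMap and vertexMap arrays, so downstream remapping loops can treat a zero entry as a missing neighbour.
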
-fCellNew=0; fEdgeNew=0; fVertexNew=0; bottomDepthNew=0; surfaceWindStressNew = 0; surfaceWindStressNewZonal = 0.0; surfaceWindStressNewMeridional = 0.0; -normalVelocityNew=0; layerThicknessNew=0; densityNew=0 -temperatureNew=0; salinityNew=0; tracer1New=0; - -temperatureRestoreNew = 0.0 -salinityRestoreNew = 0.0 -boundaryLayerDepthNew = 0.0 - -do i=1,nCells -jNew = cellMap(i) -if(jNew.ne.0) then - xCellNew(jNew)=xCell(i) - yCellNew(jNew)=yCell(i) - zCellNew(jNew)=zCell(i) - latCellNew(jNew)=latCell(i) - lonCellNew(jNew)=lonCell(i) - meshDensityNew(jNew)=meshDensity(i) - areaCellNew(jNew)=areaCell(i) - maxLevelCellNew(jNew) = kmt(i) - bottomDepthNew(jNew) = bottomDepth(i) -endif -enddo - -do i=1,nEdges -jNew = edgeMap(i) -if(jNew.ne.0) then - xEdgeNew(jNew)=xEdge(i) - yEdgeNew(jNew)=yEdge(i) - zEdgeNew(jNew)=zEdge(i) - latEdgeNew(jNew)=latEdge(i) - lonEdgeNew(jNew)=lonEdge(i) - dcEdgeNew(jNew) = dcEdge(i) - dvEdgeNew(jNew) = dvEdge(i) - fCellNew(jNew) = fCell(i) - fEdgeNew(jNew) = fEdge(i) - angleEdgeNew(jNew) = angleEdge(i) -endif -enddo - -do i=1,nVertices -jNew = vertexMap(i) -if(jNew.ne.0) then - xVertexNew(jNew)=xVertex(i) - yVertexNew(jNew)=yVertex(i) - zVertexNew(jNew)=zVertex(i) - latVertexNew(jNew)=latVertex(i) - lonVertexNew(jNew)=lonVertex(i) - fVertexNew(jNew)=fVertex(i) - areaTriangleNew(jNew)=areaTriangle(i) -endif -enddo - -deallocate(xCell) -deallocate(yCell) -deallocate(zCell) -deallocate(latCell) -deallocate(lonCell) -deallocate(meshDensity) -deallocate(xEdge) -deallocate(yEdge) -deallocate(zEdge) -deallocate(latEdge) -deallocate(lonEdge) -deallocate(xVertex) -deallocate(yVertex) -deallocate(zVertex) -deallocate(latVertex) -deallocate(lonVertex) -deallocate(dcEdge) -deallocate(dvEdge) -!deallocate(bottomDepth) - -end subroutine map_vectors - - - -subroutine map_connectivity -implicit none - -allocate(cellsOnEdgeNew(TWONew,nEdgesNew)) -allocate(boundaryEdgeNew(nVertLevelsNew,nEdgesNew)) -allocate(flipVerticesOnEdgeOrdering(nEdgesNew)) -cellsOnEdgeNew(:,:) = 0 -boundaryEdgeNew(:,:) = 0 -flipVerticesOnEdgeOrdering(:) = 0 -do iEdge=1,nEdges -if(edgeMap(iEdge).eq.0) cycle -iEdgeNew = edgeMap(iEdge) -iCell1 = cellsOnEdge(1,iEdge) -iCell2 = cellsOnEdge(2,iEdge) -iCell1New = cellMap(iCell1) -iCell2New = cellMap(iCell2) -cellsOnEdgeNew(1,iEdgeNew) = iCell1New -cellsOnEdgeNew(2,iEdgeNew) = iCell2New -if(iCell1New.eq.0.or.iCell2New.eq.0) boundaryEdgeNew(:,iEdgeNew) = 1 -if(iCell1New.eq.0.and.iCell2New.eq.0) stop "cellsOnEdge" -if(iCell1New.eq.0) then - cellsOnEdgeNew(1,iEdgeNew) = iCell2New - cellsOnEdgeNew(2,iEdgeNew) = iCell1New - flipVerticesOnEdgeOrdering(iEdgeNew) = 1 -endif -enddo -deallocate(cellsOnEdge) - -allocate(verticesOnEdgeNew(TWONew,nEdgesNew)) -allocate(boundaryVertexNew(nVertLevelsNew,nVerticesNew)) -verticesOnEdgeNew(:,:) = 0 -boundaryVertexNew(:,:) = 0 -do iEdge=1,nEdges -if(edgeMap(iEdge).eq.0) cycle -iEdgeNew = edgeMap(iEdge) -iVertex1 = VerticesOnEdge(1,iEdge) -iVertex2 = VerticesOnEdge(2,iEdge) -iVertex1New = vertexMap(iVertex1) -iVertex2New = vertexMap(iVertex2) -if(iVertex1New.eq.0.or.iVertex2New.eq.0) stop "verticesOnEdge" -if(flipVerticesOnEdgeOrdering(iEdgeNew).eq.0) then - verticesOnEdgeNew(1,iEdgeNew) = iVertex1New - verticesOnEdgeNew(2,iEdgeNew) = iVertex2New -else - verticesOnEdgeNew(1,iEdgeNew) = iVertex2New - verticesOnEdgeNew(2,iEdgeNew) = iVertex1New -endif -if(boundaryEdgeNew(1,iEdgeNew).eq.1) then - boundaryVertexNew(:,iVertex1New)=1 - boundaryVertexNew(:,iVertex2New)=1 -endif -enddo -deallocate(verticesOnEdge) - 
-allocate(nEdgesOnEdgeNew(nEdgesNew)) -allocate(edgesOnEdgeNew(maxEdges2,nEdgesNew)) -allocate(weightsOnEdgeNew(maxEdges2,nEdgesNew)) -nEdgesOnEdgeNew(:) = 0 -edgesOnEdgeNew(:,:) = 0 -weightsOnEdgeNew(:,:) = 0.0 -do iEdge=1,nEdges -if(edgeMap(iEdge).eq.0) cycle -iEdgeNew = edgeMap(iEdge) -if(boundaryEdgeNew(1,iEdgeNew).eq.1) then - nEdgesOnEdgeNew(iEdgeNew) = 0 - edgesOnEdgeNew(:,iEdgeNew) = 0 - weightsOnEdgeNew(:,iEdgeNew) = 0.0 -else - nEdgesOnEdgeNew(iEdgeNew) = nEdgesOnEdge(iEdge) - do i=1,nEdgesOnEdgeNew(iEdgeNew) - jEdge = edgesOnEdge(i,iEdge) - jEdgeNew = edgeMap(jEdge) - if(jEdgeNew.eq.0) stop "jEdgeNew" - edgesOnEdgeNew(i,iEdgeNew)=jEdgeNew - weightsOnEdgeNew(i,iEdgeNew) = weightsOnEdge(i,iEdge) - enddo -endif -enddo -deallocate(nEdgesOnEdge) -deallocate(edgesOnEdge) -deallocate(weightsOnEdge) - -allocate(cellsOnCellNew(maxEdges,nCellsNew)) -allocate(nEdgesOnCellNew(nCellsNew)) -cellsOnCellNew = 0 -nEdgesOnCellNew = 0 -do iCell=1,nCells -if(cellMap(iCell).eq.0) cycle -iCellNew = cellMap(iCell) -nEdgesOnCellNew(iCellNew)=nEdgesOnCell(iCell) -do i=1,nEdgesOnCellNew(iCellNew) -j = cellsOnCell(i,iCell) -jNew = cellMap(j) -cellsOnCellNew(i,iCellNew) = jNew -enddo -enddo -deallocate(cellsOnCell) -deallocate(nEdgesOnCell) - -allocate(edgesOnCellNew(maxEdgesNew,nCellsNew)) -edgesOnCellNew(:,:) = 0 -meshSpacingNew(:) = 0.0 -do iCell=1,nCells -if(cellMap(iCell).eq.0) cycle -iCellNew = cellMap(iCell) -do i=1,nEdgesOnCellNew(iCellNew) -j = edgesOnCell(i,iCell) -jNew = edgeMap(j) -if(jNew.eq.0) stop "edgesOnCell" -edgesOnCellNew(i,iCellNew) = jNew -meshSpacingNew(iCellNew) = meshSpacingNew(iCellNew) + dcEdgeNew(jNew)/nEdgesOnCellNew(iCellNew) -enddo -enddo -deallocate(edgesOnCell) - -allocate(verticesOnCellNew(maxEdgesNew,nCellsNew)) -verticesOnCellNew(:,:)=0 -do iCell=1,nCells -if(cellMap(iCell).eq.0) cycle -iCellNew = cellMap(iCell) -do i=1,nEdgesOnCellNew(iCellNew) -j=verticesOnCell(i,iCell) -jNew = vertexMap(j) -if(jNew.eq.0) stop "verticesOnCell" -verticesOnCellNew(i,iCellNew) = jNew -enddo -enddo -deallocate(verticesOnCell) - -allocate(cellsOnVertexNew(vertexDegreeNew,nVerticesNew)) -allocate(kiteAreasOnVertexNew(vertexDegreeNew,nVerticesNew)) -cellsOnVertexNew = 0 -kiteAreasOnVertexNew = 0 -do iVertex=1,nVertices -if(vertexMap(iVertex).eq.0) cycle -iVertexNew = vertexMap(iVertex) -do i=1,vertexDegree -j=cellsOnVertex(i,iVertex) -jNew=cellMap(j) -if(jNew.eq.0) then - kiteAreasOnVertexNew(i,iVertexNew)=0 -else - kiteAreasOnVertexNew(i,iVertexNew)=kiteAreasOnVertex(i,iVertex) -endif -cellsOnVertexNew(i,iVertexNew)=jNew -enddo -enddo -deallocate(cellsOnVertex) -deallocate(kiteAreasOnVertex) - -areaTriangleNew = 0 -do iVertex=1,nVerticesNew -do i=1,vertexDegree -areaTriangleNew(iVertex) = areaTriangleNew(iVertex) + kiteAreasOnVertexNew(i,iVertex) -enddo -enddo - -allocate(edgesOnVertexNew(vertexDegreeNew, nVerticesNew)) -edgesOnVertexNew = 0 -do iVertex=1,nVertices -if(vertexMap(iVertex).eq.0) cycle -iVertexNew = vertexMap(iVertex) -do i=1,vertexDegree -j=edgesOnVertex(i,iVertex) -jNew=edgeMap(j) -edgesOnVertexNew(i,iVertexNew)=jNew -enddo -enddo -deallocate(edgesOnVertex) - -! 
find normals -normalsNew = 0.0 -do iEdge=1,nEdgesNew -cell1 = cellsOnEdgeNew(1,iEdge) -cell2 = cellsOnEdgeNew(2,iEdge) -if(cell1.eq.0.or.cell2.eq.0) cycle -c1(1) = xCellNew(cell1); c1(2) = yCellNew(cell1); c1(3) = zCellNew(cell1) -c2(1) = xCellNew(cell2); c2(2) = yCellNew(cell2); c2(3) = zCellNew(cell2) -distance = sqrt( (c1(1)-c2(1))**2 + (c1(2)-c2(2))**2 + (c1(3)-c2(3))**2 ) - -if(on_a_sphere.eq.'YES') then - normalsNew(1,iEdge) = c2(1) - c1(1) - normalsNew(2,iEdge) = c2(2) - c1(2) - normalsNew(3,iEdge) = c2(3) - c1(3) - distance = sqrt( (c1(1)-c2(1))**2 + (c1(2)-c2(2))**2 + (c1(3)-c2(3))**2 ) - normalsNew(:,iEdge) = normalsNew(:,iEdge) / distance -else - if(distance.gt.0.5*Lx) then - write(6,*) ' periodic edge ', iEdge, distance - write(6,10) ' c1 ', c1(:) - write(6,10) ' c2 ', c2(:) - r = c2(1) - c1(1) - if(r.gt.0) c2(1) = c2(1) - Lx - if(r.lt.0) c2(1) = c2(1) + Lx - distance = sqrt( (c1(1)-c2(1))**2 + (c1(2)-c2(2))**2 + (c1(3)-c2(3))**2 ) - write(6,*) ' periodic edge fix ', iEdge, r, distance - endif - normalsNew(1,iEdge) = c2(1) - c1(1) - normalsNew(2,iEdge) = c2(2) - c1(2) - normalsNew(3,iEdge) = c2(3) - c1(3) - distance = sqrt( (c1(1)-c2(1))**2 + (c1(2)-c2(2))**2 + (c1(3)-c2(3))**2 ) - normalsNew(:,iEdge) = normalsNew(:,iEdge) / distance -endif -enddo -10 format(a20,3e15.5) - -end subroutine map_connectivity - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! -! Step 5: Check get_dz routine for hZLevel variable -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine get_dz -integer k - -if (zLevel_thickness.eq.'equally_spaced') then - - write(6,*) ' equally spaced zLevels' - do i = 1, nVertLevelsMOD - hZLevel(i) = layer_thickness_total_max / nVertLevelsMOD - end do - -elseif(zLevel_thickness.eq.'SOMA_40_Level') then - - allocate(dz(40)) - dz( 1) = 4.6074 - dz( 2) = 5.1112 - dz( 3) = 5.6694 - dz( 4) = 6.2880 - dz( 5) = 6.9732 - dz( 6) = 7.7322 - dz( 7) = 8.5730 - dz( 8) = 9.5041 - dz( 9) = 10.5353 - dz(10) = 11.6770 - dz(11) = 12.9411 - dz(12) = 14.3404 - dz(13) = 15.8894 - dz(14) = 17.6038 - dz(15) = 19.5011 - dz(16) = 21.6005 - dz(17) = 23.9235 - dz(18) = 26.4934 - dz(19) = 29.3363 - dz(20) = 32.4807 - dz(21) = 35.9585 - dz(22) = 39.8044 - dz(23) = 44.0570 - dz(24) = 48.7588 - dz(25) = 53.9568 - dz(26) = 59.7026 - dz(27) = 66.0534 - dz(28) = 73.0722 - dz(29) = 80.8284 - dz(30) = 89.3985 - dz(31) = 98.8672 - dz(32) = 109.3274 - dz(33) = 120.8818 - dz(34) = 133.6436 - dz(35) = 147.7376 - dz(36) = 163.3011 - dz(37) = 180.4858 - dz(38) = 199.4585 - dz(39) = 220.4032 - dz(40) = 243.5224 - - hZLevel = dz - -elseif(zLevel_thickness.eq.'POP_40_zLevel') then - - allocate(dz(40)) - dz( 1) = 1001.244 ! 5.006218 10.01244 - dz( 2) = 1011.258 ! 15.06873 20.12502 - dz( 3) = 1031.682 ! 25.28342 30.44183 - dz( 4) = 1063.330 ! 35.75848 41.07513 - dz( 5) = 1107.512 ! 46.61269 52.15025 - dz( 6) = 1166.145 ! 57.98098 63.81171 - dz( 7) = 1241.928 ! 70.02135 76.23099 - dz( 8) = 1338.612 ! 82.92405 89.61711 - dz( 9) = 1461.401 ! 96.92412 104.2311 - dz(10) = 1617.561 ! 112.3189 120.4067 - dz(11) = 1817.368 ! 129.4936 138.5804 - dz(12) = 2075.558 ! 148.9582 159.3360 - dz(13) = 2413.680 ! 171.4044 183.4728 - dz(14) = 2863.821 ! 197.7919 212.1110 - dz(15) = 3474.644 ! 229.4842 246.8575 - dz(16) = 4320.857 ! 268.4617 290.0660 - dz(17) = 5516.812 ! 317.6501 345.2342 - dz(18) = 7230.458 ! 381.3865 417.5388 - dz(19) = 9674.901 ! 465.9133 514.2878 - dz(20) = 13003.92 ! 579.3074 644.3270 - dz(21) = 17004.89 ! 729.3514 814.3759 - dz(22) = 20799.33 ! 
918.3725 1022.369 - dz(23) = 23356.94 ! 1139.154 1255.939 - dz(24) = 24527.19 ! 1378.574 1501.210 - dz(25) = 24898.04 ! 1625.701 1750.191 - dz(26) = 24983.22 ! 1875.107 2000.023 - dz(27) = 24997.87 ! 2125.012 2250.002 - dz(28) = 24999.79 ! 2375.000 2500.000 - dz(29) = 24999.98 ! 2625.000 2749.999 - dz(30) = 25000.00 ! 2874.999 2999.999 - dz(31) = 25000.00 ! 3124.999 3249.999 - dz(32) = 25000.00 ! 3374.999 3499.999 - dz(33) = 25000.00 ! 3624.999 3749.999 - dz(34) = 25000.00 ! 3874.999 3999.999 - dz(35) = 25000.00 ! 4124.999 4249.999 - dz(36) = 25000.00 ! 4374.999 4499.999 - dz(37) = 25000.00 ! 4624.999 4749.999 - dz(38) = 25000.00 ! 4874.999 4999.999 - dz(39) = 25000.00 ! 5124.999 5249.999 - dz(40) = 25000.00 ! 5374.999 5499.999 - - dz = dz / 100.0 - - hZLevel = dz - -elseif(zLevel_thickness.eq.'scaled_100Level') then - - allocate(dz(100)) - dz( 1) = 0.75707E-03 - dz( 2) = 0.76186E-03 - dz( 3) = 0.76675E-03 - dz( 4) = 0.77174E-03 - dz( 5) = 0.77685E-03 - dz( 6) = 0.78207E-03 - dz( 7) = 0.78740E-03 - dz( 8) = 0.79286E-03 - dz( 9) = 0.79844E-03 - dz( 10) = 0.80416E-03 - dz( 11) = 0.81001E-03 - dz( 12) = 0.81600E-03 - dz( 13) = 0.82215E-03 - dz( 14) = 0.82844E-03 - dz( 15) = 0.83490E-03 - dz( 16) = 0.84152E-03 - dz( 17) = 0.84832E-03 - dz( 18) = 0.85529E-03 - dz( 19) = 0.86246E-03 - dz( 20) = 0.86983E-03 - dz( 21) = 0.87740E-03 - dz( 22) = 0.88519E-03 - dz( 23) = 0.89320E-03 - dz( 24) = 0.90145E-03 - dz( 25) = 0.90996E-03 - dz( 26) = 0.91872E-03 - dz( 27) = 0.92776E-03 - dz( 28) = 0.93709E-03 - dz( 29) = 0.94672E-03 - dz( 30) = 0.95667E-03 - dz( 31) = 0.96696E-03 - dz( 32) = 0.97761E-03 - dz( 33) = 0.98864E-03 - dz( 34) = 0.10001E-02 - dz( 35) = 0.10119E-02 - dz( 36) = 0.10242E-02 - dz( 37) = 0.10370E-02 - dz( 38) = 0.10503E-02 - dz( 39) = 0.10642E-02 - dz( 40) = 0.10786E-02 - dz( 41) = 0.10936E-02 - dz( 42) = 0.11093E-02 - dz( 43) = 0.11258E-02 - dz( 44) = 0.11430E-02 - dz( 45) = 0.11610E-02 - dz( 46) = 0.11799E-02 - dz( 47) = 0.11999E-02 - dz( 48) = 0.12208E-02 - dz( 49) = 0.12430E-02 - dz( 50) = 0.12663E-02 - dz( 51) = 0.12911E-02 - dz( 52) = 0.13174E-02 - dz( 53) = 0.13454E-02 - dz( 54) = 0.13752E-02 - dz( 55) = 0.14071E-02 - dz( 56) = 0.14413E-02 - dz( 57) = 0.14781E-02 - dz( 58) = 0.15178E-02 - dz( 59) = 0.15609E-02 - dz( 60) = 0.16077E-02 - dz( 61) = 0.16590E-02 - dz( 62) = 0.17154E-02 - dz( 63) = 0.17778E-02 - dz( 64) = 0.18472E-02 - dz( 65) = 0.19252E-02 - dz( 66) = 0.20136E-02 - dz( 67) = 0.21149E-02 - dz( 68) = 0.22323E-02 - dz( 69) = 0.23706E-02 - dz( 70) = 0.25367E-02 - dz( 71) = 0.27410E-02 - dz( 72) = 0.30005E-02 - dz( 73) = 0.33448E-02 - dz( 74) = 0.38321E-02 - dz( 75) = 0.45991E-02 - dz( 76) = 0.60952E-02 - dz( 77) = 0.14875E-01 - dz( 78) = 0.23276E-01 - dz( 79) = 0.24181E-01 - dz( 80) = 0.24731E-01 - dz( 81) = 0.25319E-01 - dz( 82) = 0.25952E-01 - dz( 83) = 0.26635E-01 - dz( 84) = 0.27375E-01 - dz( 85) = 0.28180E-01 - dz( 86) = 0.29061E-01 - dz( 87) = 0.30030E-01 - dz( 88) = 0.31103E-01 - dz( 89) = 0.32300E-01 - dz( 90) = 0.33647E-01 - dz( 91) = 0.35178E-01 - dz( 92) = 0.36940E-01 - dz( 93) = 0.38998E-01 - dz( 94) = 0.41445E-01 - dz( 95) = 0.44422E-01 - dz( 96) = 0.48154E-01 - dz( 97) = 0.53032E-01 - dz( 98) = 0.59808E-01 - dz( 99) = 0.70207E-01 - dz(100) = 0.89718E-01 - - hZLevel = dz * layer_thickness_total_max - -elseif(zLevel_thickness.eq.'POP_60_zLevel') then - - allocate(dz(60)) - dz( 1) = 5.00 - dz( 2) = 10.00 - dz( 3) = 10.00 - dz( 4) = 10.00 - dz( 5) = 10.00 - dz( 6) = 10.00 - dz( 7) = 10.00 - dz( 8) = 10.00 - dz( 9) = 10.00 - dz(10) = 10.00 - dz(11) = 10.00 - dz(12) 
= 10.00 - dz(13) = 10.00 - dz(14) = 10.00 - dz(15) = 10.00 - dz(16) = 10.00 - dz(17) = 10.10 - dz(18) = 10.38 - dz(19) = 10.81 - dz(20) = 11.37 - dz(21) = 12.05 - dz(22) = 12.87 - dz(23) = 13.83 - dz(24) = 14.96 - dz(25) = 16.28 - dz(26) = 17.83 - dz(27) = 19.64 - dz(28) = 21.75 - dz(29) = 24.23 - dz(30) = 27.13 - dz(31) = 30.56 - dz(32) = 34.59 - dz(33) = 39.36 - dz(34) = 44.99 - dz(35) = 51.64 - dz(36) = 59.49 - dz(37) = 68.70 - dz(38) = 79.44 - dz(39) = 91.82 - dz(40) = 105.88 - dz(41) = 121.50 - dz(42) = 138.36 - dz(43) = 155.93 - dz(44) = 173.45 - dz(45) = 190.06 - dz(46) = 204.94 - dz(47) = 217.51 - dz(48) = 227.51 - dz(49) = 235.03 - dz(50) = 240.38 - dz(51) = 244.02 - dz(52) = 246.39 - dz(53) = 247.88 - dz(54) = 248.78 - dz(55) = 249.32 - dz(56) = 249.62 - dz(57) = 249.80 - dz(58) = 249.89 - dz(59) = 249.94 - dz(60) = 249.97 - - hZLevel = dz - -elseif(zLevel_thickness.eq.'isopycnal_3layer') then - - allocate(dz(3)) - dz( 1) = 500.0 - dz( 2) = 1250.0 - dz( 3) = 3250.0 - - hZLevel = dz - -else - - print *, ' Incorrect choice of zLevel_thickness: ',zLevel_thickness - stop - -endif - - refBottomDepth(1) = hZLevel(1) - do k = 2,nVertLevelsMod - refBottomDepth(k) = refBottomDepth(k-1) + hZLevel(k) - end do - - write(6,*) ' k hZLevel refBottomDepth' - do k=1,nVertLevelsMod - write(6,'(i5,2f10.2)') k,hZLevel(k), refBottomDepth(k) - enddo - write(6,*) - -end subroutine get_dz - -end program map_to_basin diff --git a/grid_gen/basin/src/module_cullLoops.F b/grid_gen/basin/src/module_cullLoops.F deleted file mode 100644 index e070a1ce2..000000000 --- a/grid_gen/basin/src/module_cullLoops.F +++ /dev/null @@ -1,84 +0,0 @@ -module cullLoops - - public :: eliminateLoops - - contains - - subroutine eliminateLoops(nCells,nEdges,nVertices,maxEdges,vertexDegree, & - nEdgesOnCell, cellsOnCell, verticesOnEdge, cellsOnVertex, edgesOnCell, lonCell, latCell, & - xCell, yCell, zCell, xEdge, yEdge, zEdge, xVertex, yVertex, zVertex, & - KMT) - - implicit none - - ! intent (in) - integer :: nCells, nEdges, nVertices, maxEdges, vertexDegree - integer :: nEdgesOnCell(nCells), cellsOnCell(maxEdges,nCells), verticesOnEdge(2,nEdges) - integer :: cellsOnVertex(vertexDegree,nVertices), edgesOnCell(maxEdges,nCells) - real :: lonCell(nCells), latCell(nCells) - real :: xCell(nCells), yCell(nCells), zCell(nCells) - real :: xEdge(nEdges), yEdge(nEdges), zEdge(nEdges) - real :: xVertex(nVertices), yVertex(nVertices), zVertex(nVertices) - integer :: edgeList(nEdges), iCellMask(nCells) - - ! intent(inout) - integer, intent(inout) :: KMT(ncells) - - ! local workspace - integer :: iCell, jCell, oCell, lCell, iEdge, i, kCell, iSharedEdge, iStartEdge, iSave, iSweep - integer :: iEdgeCounter, nEdgesInLoop(nCells), iCellAhead, LeftTurns, RightTurns - logical :: connected, atBoundary, moveSouth, moveEast, atGrenwich - real :: lat, rlat, rlon, rCenter(3), s(3), t(3), q(3), rCross, mylon, mylat, pi - - integer, dimension(:), pointer :: cellStack - integer, dimension(:), pointer :: oceanMask - integer :: iCellStart, nStack, addedCells - real :: latStart, lonStart - - write(6,*) 'Culling inland seas.....' - - allocate(cellStack(nCells/2)) - allocate(oceanMask(nCells)) - - oceanMask = 0 - addedCells = 0 - - iCellStart = maxloc(kmt, dim=1) - - write(6,*) 'Starting index. 
', iCellStart - write(6,*) 'lat, lon: ', latCell(iCellStart), lonCell(iCellStart) - write(6,*) 'Starting kmt: ', kmt(iCellStart) - - nStack = 1 - cellStack(nStack) = iCellStart - oceanMask(iCellStart) = 1 - addedCells = 1 - - do while(nStack > 0) - oCell = cellStack(nStack) - nStack = nStack - 1 - !write(6,*) ' Working on cell ', oCell, addedCells, nStack - - do i = 1, nEdgesOnCell(oCell) - iCell = cellsOnCell(i, oCell) - - if(kmt(iCell) > 0 .and. oceanMask(iCell) == 0) then - nStack = nStack + 1 - cellStack(nStack) = iCell - oceanMask(iCell) = 1 - addedCells = addedCells + 1 - end if - end do - end do - - where(oceanMask == 0) kmt(:) = 0 - - write(6,*) addedCells, ' total cells have been in the stack.' - write(6,*) 'Done culling inland seas.....' - - deallocate(cellStack) - deallocate(oceanMask) - - end subroutine eliminateLoops - -end module cullLoops diff --git a/grid_gen/basin/src/module_read_TS.F b/grid_gen/basin/src/module_read_TS.F deleted file mode 100644 index 10bc4350f..000000000 --- a/grid_gen/basin/src/module_read_TS.F +++ /dev/null @@ -1,143 +0,0 @@ -module read_TS - - integer :: rd_ncid, rd_ncids - integer :: rdDimIDt_lon - integer :: rdDimIDt_lat - integer :: rdDimIDdepth_t - integer :: rdVarIDt_lon - integer :: rdVarIDt_lat - integer :: rdVarIDdepth_t - integer :: rdVarIDTEMP - integer :: rdVarIDSALT - - integer :: rdLocalt_lon - integer :: rdLocalt_lat - integer :: rdLocaldepth_t - - contains - - subroutine read_TS_init(nx, ny, nz, fileNameT, fileNameS) - - implicit none - - include 'netcdf.inc' - - character(len=80), intent(in) :: fileNameT, fileNameS - integer, intent(out) :: nx, ny, nz - - integer :: nferr, nferrs, nferru - - nferr = nf_open(fileNameT, NF_SHARE, rd_ncid) - write(6,*) ' nferr ', nferr, rd_ncid - - ! - ! Get IDs for variable dimensions - ! - nferr = nf_inq_dimid(rd_ncid, 't_lon', rdDimIDt_lon) - write(6,*) ' nferr ', nferr, rdDimIDt_lon - nferr = nf_inq_dimlen(rd_ncid, rdDimIDt_lon, rdLocalt_lon) - write(6,*) ' nferr ', nferr, rdLocalt_lon - nferr = nf_inq_dimid(rd_ncid, 't_lat', rdDimIDt_lat) - write(6,*) ' nferr ', nferr, rdDimIDt_lat - nferr = nf_inq_dimlen(rd_ncid, rdDimIDt_lat, rdLocalt_lat) - write(6,*) ' nferr ', nferr, rdLocalt_lat - nferr = nf_inq_dimid(rd_ncid, 'depth_t', rdDimIDdepth_t) - write(6,*) ' nferr ', nferr, rdDimIDdepth_t - nferr = nf_inq_dimlen(rd_ncid, rdDimIDdepth_t, rdLocaldepth_t) - write(6,*) ' nferr ', nferr, rdLocaldepth_t - - nx = rdLocalt_lon - ny = rdLocalt_lat - nz = rdLocaldepth_t - - write(6,*) nx, ny, nz - - ! - ! Get IDs for variables - ! 
- nferr = nf_inq_varid(rd_ncid, 't_lon', rdVarIDt_lon) - write(6,*) ' nferr ', nferr, rdVarIDt_lon - nferr = nf_inq_varid(rd_ncid, 't_lat', rdVarIDt_lat) - write(6,*) ' nferr ', nferr, rdVarIDt_lat - nferr = nf_inq_varid(rd_ncid, 'depth_t', rdVarIDdepth_t) - write(6,*) ' nferr ', nferr, rdVarIDdepth_t - nferr = nf_inq_varid(rd_ncid, 'TEMP', rdVarIDTEMP) - write(6,*) ' nferr ', nferr, rdVarIDTEMP - - nferrs = nf_open(fileNameS, NF_SHARE, rd_ncids) - nferrs = nf_inq_varid(rd_ncids, 'SALT', rdVarIDSALT) - write(6,*) ' nferrs ', nferrs, rdVarIDSALT - - end subroutine read_TS_init - - subroutine read_TS_fields(t_lon, t_lat, depth_t, TEMP, SALT) - - implicit none - - include 'netcdf.inc' - - real (kind=4), dimension(:), intent(out) :: t_lon, t_lat, depth_t - real (kind=4), dimension(:,:,:), intent(out) :: TEMP, SALT - - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - integer :: nferr, nferrs, nferru - - start1(1) = 1 - count1(1) = rdLocalt_lon - nferr = nf_get_vara_real(rd_ncid, rdVarIDt_lon, start1, count1, t_lon) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDt_lon - - start1(1) = 1 - count1(1) = rdLocalt_lat - nferr = nf_get_vara_real(rd_ncid, rdVarIDt_lat, start1, count1, t_lat) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDt_lat - - start1(1) = 1 - count1(1) = rdLocaldepth_t - nferr = nf_get_vara_real(rd_ncid, rdVarIDdepth_t, start1, count1, depth_t) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDdepth_t - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - count3(1) = rdLocalt_lon - count3(2) = rdLocalt_lat - count3(3) = rdLocaldepth_t - nferr = nf_get_vara_real(rd_ncid, rdVarIDTEMP, start3, count3, TEMP) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDTEMP - write(6,*) ' temperature' , minval(TEMP), maxval(TEMP) - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - count3(1) = rdLocalt_lon - count3(2) = rdLocalt_lat - count3(3) = rdLocaldepth_t - nferrs = nf_get_vara_real(rd_ncids, rdVarIDSALT, start3, count3, SALT) - write(6,*) ' nferrs ', nferrs, rd_ncids, rdVarIDSALT - write(6,*) ' salinity' , minval(SALT), maxval(SALT) - - end subroutine read_TS_fields - - - subroutine read_TS_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr, nferrs - - nferr = nf_close(rd_ncid) - write(6,*) ' nferr ', nferr - - nferrs = nf_close(rd_ncids) - write(6,*) ' nferrs ', nferrs - - end subroutine read_TS_finalize - -end module read_TS diff --git a/grid_gen/basin/src/module_read_U.F b/grid_gen/basin/src/module_read_U.F deleted file mode 100644 index b794e5ae2..000000000 --- a/grid_gen/basin/src/module_read_U.F +++ /dev/null @@ -1,130 +0,0 @@ -module read_U - - integer :: rd_ncidu - integer :: rdDimIDu_lon - integer :: rdDimIDu_lat - integer :: rdDimIDdepth_t - integer :: rdVarIDu_lon - integer :: rdVarIDu_lat - integer :: rdVarIDdepth_t - integer :: rdVarIDTAUX - integer :: rdVarIDTAUY - - integer :: rdLocalu_lon - integer :: rdLocalu_lat - integer :: rdLocaldepth_t - - contains - - subroutine read_U_init(nx, ny, nz, fileNameU) - - implicit none - - include 'netcdf.inc' - - character(len=80), intent(in) :: fileNameU - integer, intent(out) :: nx, ny, nz - - integer :: nferru - - nferru = nf_open(fileNameU, NF_SHARE, rd_ncidu) - write(6,*) ' nferru ', nferru, rd_ncidu - - ! - ! Get IDs for variable dimensions - ! 
- nferru = nf_inq_dimid(rd_ncidu, 'u_lon', rdDimIDu_lon) - write(6,*) ' nferru ', nferru, rdDimIDu_lon - nferru = nf_inq_dimlen(rd_ncidu, rdDimIDu_lon, rdLocalu_lon) - write(6,*) ' nferru ', nferru, rdLocalu_lon - nferru = nf_inq_dimid(rd_ncidu, 'u_lat', rdDimIDu_lat) - write(6,*) ' nferru ', nferru, rdDimIDu_lat - nferru = nf_inq_dimlen(rd_ncidu, rdDimIDu_lat, rdLocalu_lat) - write(6,*) ' nferru ', nferru, rdLocalu_lat - nferru = nf_inq_dimid(rd_ncidu, 'depth_t', rdDimIDdepth_t) - write(6,*) ' nferru ', nferru, rdDimIDdepth_t - nferru = nf_inq_dimlen(rd_ncidu, rdDimIDdepth_t, rdLocaldepth_t) - write(6,*) ' nferru ', nferru, rdLocaldepth_t - - nx = rdLocalu_lon - ny = rdLocalu_lat - nz = rdLocaldepth_t - - write(6,*) nx, ny, nz - - ! - ! Get IDs for variables - ! - nferru = nf_inq_varid(rd_ncidu, 'u_lon', rdVarIDu_lon) - write(6,*) ' nferru ', nferru, rdVarIDu_lon - nferru = nf_inq_varid(rd_ncidu, 'u_lat', rdVarIDu_lat) - write(6,*) ' nferru ', nferru, rdVarIDu_lat - nferru = nf_inq_varid(rd_ncidu, 'depth_t', rdVarIDdepth_t) - write(6,*) ' nferru ', nferru, rdVarIDdepth_t - - nferru = nf_inq_varid(rd_ncidu, 'TAUX', rdVarIDTAUX) - nferru = nf_inq_varid(rd_ncidu, 'TAUY', rdVarIDTAUY) - write(6,*) ' nferru ', nferru, rdVarIDTAUX, rdVarIDTAUY - - end subroutine read_U_init - - subroutine read_U_fields(u_lon, u_lat, depth_t, TAUX, TAUY) - - implicit none - - include 'netcdf.inc' - - real (kind=4), dimension(:), intent(out) :: u_lon, u_lat, depth_t - real (kind=4), dimension(:,:), intent(out) :: TAUX, TAUY - - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - integer :: nferru - - start1(1) = 1 - count1(1) = rdLocalu_lon - nferru = nf_get_vara_real(rd_ncidu, rdVarIDu_lon, start1, count1, u_lon) - write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDu_lon - - start1(1) = 1 - count1(1) = rdLocalu_lat - nferru = nf_get_vara_real(rd_ncidu, rdVarIDu_lat, start1, count1, u_lat) - write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDu_lat - - start1(1) = 1 - count1(1) = rdLocaldepth_t - nferru = nf_get_vara_real(rd_ncidu, rdVarIDdepth_t, start1, count1, depth_t) - write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDdepth_t - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalu_lon - count2(2) = rdLocalu_lat - nferru = nf_get_vara_real(rd_ncidu, rdVarIDTAUX, start2, count2, TAUX) - nferru = nf_get_vara_real(rd_ncidu, rdVarIDTAUY, start2, count2, TAUY) - write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDTAUX, rdVarIDTAUY - write(6,*) ' TAUX' , minval(TAUX), maxval(TAUX) - write(6,*) ' TAUY' , minval(TAUY), maxval(TAUY) - - - end subroutine read_U_fields - - - subroutine read_U_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferru - - nferru = nf_close(rd_ncidu) - write(6,*) ' nferru ', nferru - - - end subroutine read_U_finalize - -end module read_U diff --git a/grid_gen/basin/src/module_read_monthly.F b/grid_gen/basin/src/module_read_monthly.F deleted file mode 100644 index dbb962125..000000000 --- a/grid_gen/basin/src/module_read_monthly.F +++ /dev/null @@ -1,154 +0,0 @@ -module read_MONTHLY - - private - - public :: read_MONTHLY_init, read_MONTHLY_fields, read_MONTHLY_finalize - - integer :: rd_ncid, rd_ncids, rd_ncidu - integer :: rdDimIDt_lon - integer :: rdDimIDt_lat - integer :: rdDimIDdepth_t - integer :: rdVarIDt_lon - integer :: rdVarIDt_lat - integer :: rdVarIDdepth_t - integer :: rdVarIDTEMP - integer :: rdVarIDSALT - integer :: rdVarIDTAUX - integer :: rdVarIDTAUY - 
- integer :: rdLocalt_lon - integer :: rdLocalt_lat - integer :: rdLocaldepth_t - - contains - - subroutine read_MONTHLY_init(nx, ny, nz, fileNameT, fileNameS, fileNameU) - - implicit none - - include 'netcdf.inc' - - character(len=80), intent(in) :: fileNameT, fileNameS, fileNameU - integer, intent(out) :: nx, ny, nz - - integer :: nferr, nferrs, nferru - - nferr = nf_open(fileNameT, NF_SHARE, rd_ncid) - write(6,*) ' nferr ', nferr, rd_ncid - - write(6,*) fileNameT - write(6,*) fileNameS - write(6,*) fileNameU - - ! - ! Get IDs for variable dimensions - ! - nferr = nf_inq_dimid(rd_ncid, 't_lon', rdDimIDt_lon) - write(6,*) ' nferr ', nferr, rdDimIDt_lon - nferr = nf_inq_dimlen(rd_ncid, rdDimIDt_lon, rdLocalt_lon) - write(6,*) ' nferr ', nferr, rdLocalt_lon - nferr = nf_inq_dimid(rd_ncid, 't_lat', rdDimIDt_lat) - write(6,*) ' nferr ', nferr, rdDimIDt_lat - nferr = nf_inq_dimlen(rd_ncid, rdDimIDt_lat, rdLocalt_lat) - write(6,*) ' nferr ', nferr, rdLocalt_lat - nferr = nf_inq_dimid(rd_ncid, 'depth_t', rdDimIDdepth_t) - write(6,*) ' nferr ', nferr, rdDimIDdepth_t - nferr = nf_inq_dimlen(rd_ncid, rdDimIDdepth_t, rdLocaldepth_t) - write(6,*) ' nferr ', nferr, rdLocaldepth_t - - nx = rdLocalt_lon - ny = rdLocalt_lat - nz = rdLocaldepth_t - - write(6,*) nx, ny, nz - - ! - ! Get IDs for variables - ! - nferr = nf_inq_varid(rd_ncid, 't_lon', rdVarIDt_lon) - write(6,*) ' nferr long ', nferr, rdVarIDt_lon - nferr = nf_inq_varid(rd_ncid, 't_lat', rdVarIDt_lat) - write(6,*) ' nferr lat ', nferr, rdVarIDt_lat - nferr = nf_inq_varid(rd_ncid, 'depth_t', rdVarIDdepth_t) - write(6,*) ' nferr depth ', nferr, rdVarIDdepth_t - nferr = nf_inq_varid(rd_ncid, 'TEMP', rdVarIDTEMP) - write(6,*) ' nferr TEMP ', nferr, rdVarIDTEMP - - nferrs = nf_open(fileNameS, NF_SHARE, rd_ncids) - nferrs = nf_inq_varid(rd_ncids, 'SALT', rdVarIDSALT) - write(6,*) ' nferrs SALT ', nferrs, rdVarIDSALT - - nferru = nf_open(fileNameU, NF_SHARE, rd_ncidu) - nferru = nf_inq_varid(rd_ncidu, 'TAUX', rdVarIDTAUX) - nferru = nf_inq_varid(rd_ncidu, 'TAUY', rdVarIDTAUY) - write(6,*) ' nferru ', nferru, rdVarIDTAUX, rdVarIDTAUY - - end subroutine read_MONTHLY_init - - subroutine read_MONTHLY_fields(TEMP, SALT, TAUX, TAUY) - - implicit none - - include 'netcdf.inc' - - real (kind=4), dimension(:,:), intent(out) :: TEMP, SALT - real (kind=4), dimension(:,:), intent(out) :: TAUX, TAUY - - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - integer :: nferr, nferrs, nferru - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalt_lon - count2(2) = rdLocalt_lat - nferr = nf_get_vara_real(rd_ncid, rdVarIDTEMP, start2, count2, TEMP) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDTEMP - write(6,*) ' temperature' , minval(TEMP), maxval(TEMP) - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalt_lon - count2(2) = rdLocalt_lat - nferrs = nf_get_vara_real(rd_ncids, rdVarIDSALT, start2, count2, SALT) - write(6,*) ' nferrs ', nferrs, rd_ncids, rdVarIDSALT - write(6,*) ' salinity' , minval(SALT), maxval(SALT) - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalt_lon - count2(2) = rdLocalt_lat - nferru = nf_get_vara_real(rd_ncidu, rdVarIDTAUX, start2, count2, TAUX) - nferru = nf_get_vara_real(rd_ncidu, rdVarIDTAUY, start2, count2, TAUY) - write(6,*) ' nferru ', nferru, rd_ncidu, rdVarIDTAUX, rdVarIDTAUY - write(6,*) ' TAUX' , minval(TAUX), maxval(TAUX) - write(6,*) ' TAUY' , minval(TAUY), maxval(TAUY) - - - end subroutine 
read_MONTHLY_fields - - - subroutine read_MONTHLY_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr, nferrs, nferru - - nferr = nf_close(rd_ncid) - write(6,*) ' nferr ', nferr - - nferrs = nf_close(rd_ncids) - write(6,*) ' nferrs ', nferrs - - nferru = nf_close(rd_ncidu) - write(6,*) ' nferru ', nferru - - - end subroutine read_MONTHLY_finalize - -end module read_MONTHLY diff --git a/grid_gen/basin/src/module_read_netcdf.F b/grid_gen/basin/src/module_read_netcdf.F deleted file mode 100644 index b4072c3e5..000000000 --- a/grid_gen/basin/src/module_read_netcdf.F +++ /dev/null @@ -1,523 +0,0 @@ -module read_netcdf - - integer :: rd_ncid - integer :: rdDimIDTime - integer :: rdDimIDnCells - integer :: rdDimIDnEdges - integer :: rdDimIDnVertices - integer :: rdDimIDmaxEdges - integer :: rdDimIDmaxEdges2 - integer :: rdDimIDnVertLevels - integer :: rdDimIDTWO - integer :: rdDimIDvertexDegree - integer :: rdVarIDlatCell - integer :: rdVarIDlonCell - integer :: rdVarIDmeshDensity - integer :: rdVarIDxCell - integer :: rdVarIDyCell - integer :: rdVarIDzCell - integer :: rdVarIDindexToCellID - integer :: rdVarIDlatEdge - integer :: rdVarIDlonEdge - integer :: rdVarIDxEdge - integer :: rdVarIDyEdge - integer :: rdVarIDzEdge - integer :: rdVarIDindexToEdgeID - integer :: rdVarIDlatVertex - integer :: rdVarIDlonVertex - integer :: rdVarIDxVertex - integer :: rdVarIDyVertex - integer :: rdVarIDzVertex - integer :: rdVarIDindexToVertexID - integer :: rdVarIDcellsOnEdge - integer :: rdVarIDnEdgesOnCell - integer :: rdVarIDnEdgesOnEdge - integer :: rdVarIDedgesOnCell - integer :: rdVarIDedgesOnEdge - integer :: rdVarIDweightsOnEdge - integer :: rdVarIDdvEdge - integer :: rdVarIDdcEdge - integer :: rdVarIDangleEdge - integer :: rdVarIDareaCell - integer :: rdVarIDareaTriangle - integer :: rdVarIDcellsOnCell - integer :: rdVarIDverticesOnCell - integer :: rdVarIDverticesOnEdge - integer :: rdVarIDedgesOnVertex - integer :: rdVarIDcellsOnVertex - integer :: rdVarIDkiteAreasOnVertex - integer :: rdVarIDfEdge - integer :: rdVarIDfVertex - integer :: rdVarIDbottomDepth - integer :: rdVarIDnormalVelocity - integer :: rdVarIDtangentialVelocity - integer :: rdVarIDlayerThickness - - integer :: rdLocalnCells - integer :: rdLocalnEdges - integer :: rdLocalnVertices - integer :: rdLocalmaxEdges - integer :: rdLocalmaxEdges2 - integer :: rdLocalnVertLevels - integer :: rdLocalTWO - integer :: rdLocalvertexDegree - - contains - - subroutine read_netcdf_init( & - nCells, & - nEdges, & - nVertices, & - maxEdges, & - maxEdges2, & - nVertLevels, & - TWO, & - vertexDegree & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(out) :: nCells - integer, intent(out) :: nEdges - integer, intent(out) :: nVertices - integer, intent(out) :: maxEdges - integer, intent(out) :: maxEdges2 - integer, intent(out) :: nVertLevels - integer, intent(out) :: TWO - integer, intent(out) :: vertexDegree - - integer :: nferr - - - nferr = nf_open('grid.nc', NF_SHARE, rd_ncid) - - ! - ! Get IDs for variable dimensions - ! 
- nferr = nf_inq_unlimdim(rd_ncid, rdDimIDTime) - nferr = nf_inq_dimid(rd_ncid, 'nCells', rdDimIDnCells) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnCells, rdLocalnCells) - nferr = nf_inq_dimid(rd_ncid, 'nEdges', rdDimIDnEdges) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnEdges, rdLocalnEdges) - nferr = nf_inq_dimid(rd_ncid, 'nVertices', rdDimIDnVertices) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnVertices, rdLocalnVertices) - nferr = nf_inq_dimid(rd_ncid, 'maxEdges', rdDimIDmaxEdges) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDmaxEdges, rdLocalmaxEdges) - nferr = nf_inq_dimid(rd_ncid, 'maxEdges2', rdDimIDmaxEdges2) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDmaxEdges2, rdLocalmaxEdges2) - nferr = nf_inq_dimid(rd_ncid, 'nVertLevels', rdDimIDnVertLevels) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnVertLevels, rdLocalnVertLevels) - nferr = nf_inq_dimid(rd_ncid, 'vertexDegree', rdDimIDvertexDegree) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDvertexDegree, rdLocalvertexDegree) - nferr = nf_inq_dimid(rd_ncid, 'TWO', rdDimIDTWO) - nferr = nf_inq_dimlen(rd_ncid, rdDimIDTWO, rdLocalTWO) - - - nCells = rdLocalnCells - nEdges = rdLocalnEdges - nVertices = rdLocalnVertices - maxEdges = rdLocalmaxEdges - maxEdges2 = rdLocalmaxEdges2 - nVertLevels = rdLocalnVertLevels - vertexDegree = rdLocalvertexDegree - TWO = rdLocalTWO - - ! - ! Get IDs for variables - ! - nferr = nf_inq_varid(rd_ncid, 'latCell', rdVarIDlatCell) - nferr = nf_inq_varid(rd_ncid, 'lonCell', rdVarIDlonCell) - nferr = nf_inq_varid(rd_ncid, 'meshDensity', rdVarIDmeshDensity) - nferr = nf_inq_varid(rd_ncid, 'xCell', rdVarIDxCell) - nferr = nf_inq_varid(rd_ncid, 'yCell', rdVarIDyCell) - nferr = nf_inq_varid(rd_ncid, 'zCell', rdVarIDzCell) - nferr = nf_inq_varid(rd_ncid, 'indexToCellID', rdVarIDindexToCellID) - nferr = nf_inq_varid(rd_ncid, 'latEdge', rdVarIDlatEdge) - nferr = nf_inq_varid(rd_ncid, 'lonEdge', rdVarIDlonEdge) - nferr = nf_inq_varid(rd_ncid, 'xEdge', rdVarIDxEdge) - nferr = nf_inq_varid(rd_ncid, 'yEdge', rdVarIDyEdge) - nferr = nf_inq_varid(rd_ncid, 'zEdge', rdVarIDzEdge) - nferr = nf_inq_varid(rd_ncid, 'indexToEdgeID', rdVarIDindexToEdgeID) - nferr = nf_inq_varid(rd_ncid, 'latVertex', rdVarIDlatVertex) - nferr = nf_inq_varid(rd_ncid, 'lonVertex', rdVarIDlonVertex) - nferr = nf_inq_varid(rd_ncid, 'xVertex', rdVarIDxVertex) - nferr = nf_inq_varid(rd_ncid, 'yVertex', rdVarIDyVertex) - nferr = nf_inq_varid(rd_ncid, 'zVertex', rdVarIDzVertex) - nferr = nf_inq_varid(rd_ncid, 'indexToVertexID', rdVarIDindexToVertexID) - nferr = nf_inq_varid(rd_ncid, 'cellsOnEdge', rdVarIDcellsOnEdge) - nferr = nf_inq_varid(rd_ncid, 'nEdgesOnCell', rdVarIDnEdgesOnCell) - nferr = nf_inq_varid(rd_ncid, 'nEdgesOnEdge', rdVarIDnEdgesOnEdge) - nferr = nf_inq_varid(rd_ncid, 'edgesOnCell', rdVarIDedgesOnCell) - nferr = nf_inq_varid(rd_ncid, 'edgesOnEdge', rdVarIDedgesOnEdge) - nferr = nf_inq_varid(rd_ncid, 'weightsOnEdge', rdVarIDweightsOnEdge) - nferr = nf_inq_varid(rd_ncid, 'dvEdge', rdVarIDdvEdge) - nferr = nf_inq_varid(rd_ncid, 'dcEdge', rdVarIDdcEdge) - nferr = nf_inq_varid(rd_ncid, 'angleEdge', rdVarIDangleEdge) - nferr = nf_inq_varid(rd_ncid, 'areaCell', rdVarIDareaCell) - nferr = nf_inq_varid(rd_ncid, 'areaTriangle', rdVarIDareaTriangle) - nferr = nf_inq_varid(rd_ncid, 'cellsOnCell', rdVarIDcellsOnCell) - nferr = nf_inq_varid(rd_ncid, 'verticesOnCell', rdVarIDverticesOnCell) - nferr = nf_inq_varid(rd_ncid, 'verticesOnEdge', rdVarIDverticesOnEdge) - nferr = nf_inq_varid(rd_ncid, 'edgesOnVertex', rdVarIDedgesOnVertex) - nferr = nf_inq_varid(rd_ncid, 'cellsOnVertex', 
rdVarIDcellsOnVertex) - nferr = nf_inq_varid(rd_ncid, 'kiteAreasOnVertex', rdVarIDkiteAreasOnVertex) - nferr = nf_inq_varid(rd_ncid, 'fEdge', rdVarIDfEdge) - nferr = nf_inq_varid(rd_ncid, 'fVertex', rdVarIDfVertex) - nferr = nf_inq_varid(rd_ncid, 'bottomDepth', rdVarIDbottomDepth) - nferr = nf_inq_varid(rd_ncid, 'normalVelocity', rdVarIDnormalVelocity) - nferr = nf_inq_varid(rd_ncid, 'tangentialVelocity', rdVarIDtangentialVelocity) - nferr = nf_inq_varid(rd_ncid, 'layerThickness', rdVarIDlayerThickness) - - end subroutine read_netcdf_init - - - subroutine read_netcdf_fields( & - time, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - bottomDepth, & - normalVelocity, & - tangentialVelocity, & - layerThickness & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: time - real (kind=8), dimension(:), intent(out) :: latCell - real (kind=8), dimension(:), intent(out) :: lonCell - real (kind=8), dimension(:), intent(out) :: meshDensity - real (kind=8), dimension(:), intent(out) :: xCell - real (kind=8), dimension(:), intent(out) :: yCell - real (kind=8), dimension(:), intent(out) :: zCell - integer, dimension(:), intent(out) :: indexToCellID - real (kind=8), dimension(:), intent(out) :: latEdge - real (kind=8), dimension(:), intent(out) :: lonEdge - real (kind=8), dimension(:), intent(out) :: xEdge - real (kind=8), dimension(:), intent(out) :: yEdge - real (kind=8), dimension(:), intent(out) :: zEdge - integer, dimension(:), intent(out) :: indexToEdgeID - real (kind=8), dimension(:), intent(out) :: latVertex - real (kind=8), dimension(:), intent(out) :: lonVertex - real (kind=8), dimension(:), intent(out) :: xVertex - real (kind=8), dimension(:), intent(out) :: yVertex - real (kind=8), dimension(:), intent(out) :: zVertex - integer, dimension(:), intent(out) :: indexToVertexID - integer, dimension(:,:), intent(out) :: cellsOnEdge - integer, dimension(:), intent(out) :: nEdgesOnCell - integer, dimension(:), intent(out) :: nEdgesOnEdge - integer, dimension(:,:), intent(out) :: edgesOnCell - integer, dimension(:,:), intent(out) :: edgesOnEdge - real (kind=8), dimension(:,:), intent(out) :: weightsOnEdge - real (kind=8), dimension(:), intent(out) :: dvEdge - real (kind=8), dimension(:), intent(out) :: dcEdge - real (kind=8), dimension(:), intent(out) :: angleEdge - real (kind=8), dimension(:), intent(out) :: areaCell - real (kind=8), dimension(:), intent(out) :: areaTriangle - integer, dimension(:,:), intent(out) :: cellsOnCell - integer, dimension(:,:), intent(out) :: verticesOnCell - integer, dimension(:,:), intent(out) :: verticesOnEdge - integer, dimension(:,:), intent(out) :: edgesOnVertex - integer, dimension(:,:), intent(out) :: cellsOnVertex - real (kind=8), dimension(:,:), intent(out) :: kiteAreasOnVertex - real (kind=8), dimension(:), intent(out) :: fEdge - real (kind=8), dimension(:), intent(out) :: fVertex - real (kind=8), dimension(:), intent(out) :: bottomDepth - real (kind=8), dimension(:,:,:), intent(out) :: normalVelocity - real 
(kind=8), dimension(:,:,:), intent(out) :: tangentialVelocity - real (kind=8), dimension(:,:,:), intent(out) :: layerThickness - - logical :: meshDensityPresent - - integer :: nferr - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - meshDensityPresent = .false. - - start1(1) = 1 - - start2(1) = 1 - start2(2) = 1 - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDlatCell, start1, count1, latCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDlonCell, start1, count1, lonCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_inq_varid(rd_ncid, 'meshDensity', rdVarIDmeshDensity) - if(nferr.eq.0) then - nferr = nf_get_vara_double(rd_ncid, rdVarIDmeshDensity, start1, count1, meshDensity) - else - meshDensity=1.0 - write(6,*) ' mesh density not present ', nferr, rdVarIDmeshDensity - endif - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDxCell, start1, count1, xCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDyCell, start1, count1, yCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDzCell, start1, count1, zCell) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDindexToCellID, start1, count1, indexToCellID) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDlatEdge, start1, count1, latEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDlonEdge, start1, count1, lonEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDxEdge, start1, count1, xEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDyEdge, start1, count1, yEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDzEdge, start1, count1, zEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDindexToEdgeID, start1, count1, indexToEdgeID) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDlatVertex, start1, count1, latVertex) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDlonVertex, start1, count1, lonVertex) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDxVertex, start1, count1, xVertex) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDyVertex, start1, count1, yVertex) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDzVertex, start1, count1, zVertex) - - start1(1) = 1 - count1( 1) = 
rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_int(rd_ncid, rdVarIDindexToVertexID, start1, count1, indexToVertexID) - - start2(2) = 1 - count2( 1) = rdLocalTWO - count2( 2) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDcellsOnEdge, start2, count2, cellsOnEdge) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDnEdgesOnCell, start1, count1, nEdgesOnCell) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDnEdgesOnEdge, start1, count1, nEdgesOnEdge) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges - count2( 2) = rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDedgesOnCell, start2, count2, edgesOnCell) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges2 - count2( 2) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDedgesOnEdge, start2, count2, edgesOnEdge) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges2 - count2( 2) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDweightsOnEdge, start2, count2, weightsOnEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDdvEdge, start1, count1, dvEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDdcEdge, start1, count1, dcEdge) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDangleEdge, start1, count1, angleEdge) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDareaCell, start1, count1, areaCell) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDareaTriangle, start1, count1, areaTriangle) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges - count2( 2) = rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDcellsOnCell, start2, count2, cellsOnCell) - - start2(2) = 1 - count2( 1) = rdLocalmaxEdges - count2( 2) = rdLocalnCells - nferr = nf_get_vara_int(rd_ncid, rdVarIDverticesOnCell, start2, count2, verticesOnCell) - - start2(2) = 1 - count2( 1) = rdLocalTWO - count2( 2) = rdLocalnEdges - nferr = nf_get_vara_int(rd_ncid, rdVarIDverticesOnEdge, start2, count2, verticesOnEdge) - - start2(2) = 1 - count2( 1) = rdLocalvertexDegree - count2( 2) = rdLocalnVertices - nferr = nf_get_vara_int(rd_ncid, rdVarIDedgesOnVertex, start2, count2, edgesOnVertex) - - start2(2) = 1 - count2( 1) = rdLocalvertexDegree - count2( 2) = rdLocalnVertices - nferr = nf_get_vara_int(rd_ncid, rdVarIDcellsOnVertex, start2, count2, cellsOnVertex) - - start2(2) = 1 - count2( 1) = rdLocalvertexDegree - count2( 2) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDkiteAreasOnVertex, start2, count2, kiteAreasOnVertex) - - start1(1) = 1 - count1( 1) = rdLocalnEdges - count1( 1) = rdLocalnEdges - nferr = nf_get_vara_double(rd_ncid, rdVarIDfEdge, start1, count1, fEdge) - - start1(1) = 1 - count1( 1) = rdLocalnVertices - count1( 1) = rdLocalnVertices - nferr = nf_get_vara_double(rd_ncid, rdVarIDfVertex, start1, count1, fVertex) - - start1(1) = 1 - count1( 1) = rdLocalnCells - count1( 1) = rdLocalnCells - nferr = nf_get_vara_double(rd_ncid, rdVarIDbottomDepth, start1, count1, bottomDepth) - - start3(3) = time - count3( 1) = rdLocalnVertLevels - count3( 2) = rdLocalnEdges - count3( 3) = 1 - nferr = nf_get_vara_double(rd_ncid, 
rdVarIDnormalVelocity, start3, count3, normalVelocity) - - start3(3) = time - count3( 1) = rdLocalnVertLevels - count3( 2) = rdLocalnEdges - count3( 3) = 1 - nferr = nf_get_vara_double(rd_ncid, rdVarIDtangentialVelocity, start3, count3, tangentialVelocity) - - start3(3) = time - count3( 1) = rdLocalnVertLevels - count3( 2) = rdLocalnCells - count3( 3) = 1 - nferr = nf_get_vara_double(rd_ncid, rdVarIDlayerThickness, start3, count3, layerThickness) - - end subroutine read_netcdf_fields - - - subroutine read_netcdf_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(rd_ncid) - - end subroutine read_netcdf_finalize - -end module read_netcdf diff --git a/grid_gen/basin/src/module_read_topo.F b/grid_gen/basin/src/module_read_topo.F deleted file mode 100644 index 183aa7576..000000000 --- a/grid_gen/basin/src/module_read_topo.F +++ /dev/null @@ -1,109 +0,0 @@ -module read_topo - - integer :: rd_ncid - integer :: rdDimIDnx - integer :: rdDimIDny - integer :: rdVarIDz - integer :: rdVarIDx - integer :: rdVarIDy - - integer :: rdLocalnx - integer :: rdLocalny - - contains - - subroutine read_topo_init( nx, ny) - - implicit none - - include 'netcdf.inc' - - integer, intent(out) :: nx, ny - - integer :: nferr - - - nferr = nf_open('topo/ETOPO2v2c_f4.nc', NF_SHARE, rd_ncid) - write(6,*) ' nferr ', nferr, rd_ncid - - ! - ! Get IDs for variable dimensions - ! - nferr = nf_inq_dimid(rd_ncid, 'x', rdDimIDnx) - write(6,*) ' nferr ', nferr, rdDimIDnx - nferr = nf_inq_dimlen(rd_ncid, rdDimIDnx, rdLocalnx) - write(6,*) ' nferr ', nferr, rdLocalnx - nferr = nf_inq_dimid(rd_ncid, 'y', rdDimIDny) - write(6,*) ' nferr ', nferr, rdDimIDny - nferr = nf_inq_dimlen(rd_ncid, rdDimIDny, rdLocalny) - write(6,*) ' nferr ', nferr, rdLocalny - - nx = rdLocalnx - ny = rdLocalny - - write(6,*) nx, ny - - ! - ! Get IDs for variables - ! 
- nferr = nf_inq_varid(rd_ncid, 'x', rdVarIDx) - write(6,*) ' nferr ', nferr, rdVarIDx - nferr = nf_inq_varid(rd_ncid, 'y', rdVarIDy) - write(6,*) ' nferr ', nferr, rdVarIDy - nferr = nf_inq_varid(rd_ncid, 'z', rdVarIDz) - write(6,*) ' nferr ', nferr, rdVarIDz - - end subroutine read_topo_init - - - subroutine read_topo_fields(x,y,z) - - implicit none - - include 'netcdf.inc' - - real (kind=4), dimension(:), intent(out) :: x,y - real (kind=4), dimension(:,:), intent(out) :: z - - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - integer :: nferr - - start1(1) = 1 - count1(1) = rdLocalnx - nferr = nf_get_vara_real(rd_ncid, rdVarIDx, start1, count1, x) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDx - - start1(1) = 1 - count1(1) = rdLocalny - nferr = nf_get_vara_real(rd_ncid, rdVarIDy, start1, count1, y) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDy - - start2(1) = 1 - start2(2) = 1 - count2(1) = rdLocalnx - count2(2) = rdLocalny - nferr = nf_get_vara_real(rd_ncid, rdVarIDz, start2, count2, z) - write(6,*) ' nferr ', nferr, rd_ncid, rdVarIDz, rdLocalnx - - end subroutine read_topo_fields - - - subroutine read_topo_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(rd_ncid) - write(6,*) ' nferr ', nferr - - - end subroutine read_topo_finalize - -end module read_topo diff --git a/grid_gen/basin/src/module_write_netcdf.F b/grid_gen/basin/src/module_write_netcdf.F deleted file mode 100644 index 8e6fdb5ad..000000000 --- a/grid_gen/basin/src/module_write_netcdf.F +++ /dev/null @@ -1,694 +0,0 @@ -module write_netcdf - - integer :: wr_ncid - integer :: wrDimIDTime - integer :: wrDimIDnCells - integer :: wrDimIDnEdges - integer :: wrDimIDnVertices - integer :: wrDimIDmaxEdges - integer :: wrDimIDmaxEdges2 - integer :: wrDimIDTWO - integer :: wrDimIDvertexDegree - integer :: wrDimIDnVertLevels - integer :: wrVarIDlatCell - integer :: wrVarIDlonCell - integer :: wrVarIDmeshDensity - integer :: wrVarIDxCell - integer :: wrVarIDyCell - integer :: wrVarIDzCell - integer :: wrVarIDindexToCellID - integer :: wrVarIDlatEdge - integer :: wrVarIDlonEdge - integer :: wrVarIDxEdge - integer :: wrVarIDyEdge - integer :: wrVarIDzEdge - integer :: wrVarIDindexToEdgeID - integer :: wrVarIDlatVertex - integer :: wrVarIDlonVertex - integer :: wrVarIDxVertex - integer :: wrVarIDyVertex - integer :: wrVarIDzVertex - integer :: wrVarIDindexToVertexID - integer :: wrVarIDmaxLevelCell - integer :: wrVarIDcellsOnEdge - integer :: wrVarIDnEdgesOnCell - integer :: wrVarIDnEdgesOnEdge - integer :: wrVarIDedgesOnCell - integer :: wrVarIDedgesOnEdge - integer :: wrVarIDweightsOnEdge - integer :: wrVarIDdvEdge - integer :: wrVarIDdcEdge - integer :: wrVarIDangleEdge - integer :: wrVarIDareaCell - integer :: wrVarIDareaTriangle - integer :: wrVarIDcellsOnCell - integer :: wrVarIDverticesOnCell - integer :: wrVarIDverticesOnEdge - integer :: wrVarIDedgesOnVertex - integer :: wrVarIDcellsOnVertex - integer :: wrVarIDkiteAreasOnVertex - integer :: wrVarIDfEdge - integer :: wrVarIDfVertex - integer :: wrVarIDfCell - integer :: wrVarIDbottomDepth - integer :: wrVarIDnormalVelocity - integer :: wrVarIDboundaryEdge - integer :: wrVarIDboundaryVertex - integer :: wrVarIDsurfaceWindStress - integer :: wrVarIDsurfaceWindStressZonal - integer :: wrVarIDsurfaceWindStressMeridional - integer :: wrVarIDlayerThickness - integer :: wrVarIDdensity - integer :: wrVarIDtemperature - 
integer :: wrVarIDsalinity - integer :: wrVarIDtracer1 - integer :: wrVarIDtemperatureRestore - integer :: wrVarIDsalinityRestore - integer :: wrVarIDboundaryLayerDepth - integer :: wrVarIDrefBottomDepth - - integer :: wrLocalnCells - integer :: wrLocalnEdges - integer :: wrLocalnVertices - integer :: wrLocalmaxEdges - integer :: wrLocalnVertLevels - integer :: wrLocalvertexDegree - - contains - - subroutine write_netcdf_init( & - nCells, & - nEdges, & - nVertices, & - maxEdges, & - nVertLevels, & - vertexDegree, & - sphere_radius, & - on_a_sphere & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: nCells - integer, intent(in) :: nEdges - integer, intent(in) :: nVertices - integer, intent(in) :: maxEdges - integer, intent(in) :: nVertLevels - integer, intent(in) :: vertexDegree - character (len=16) :: on_a_sphere - real*8 :: sphere_radius - - - integer :: nferr - integer, dimension(10) :: dimlist - - - wrLocalnCells = nCells - wrLocalnEdges = nEdges - wrLocalnVertices = nVertices - wrLocalmaxEdges = maxEdges - wrLocalnVertLevels = nVertLevels - wrLocalvertexDegree = vertexDegree - - nferr = nf_create('ocean.nc', IOR(NF_CLOBBER,NF_64BIT_OFFSET), wr_ncid) - - ! - ! Define dimensions - ! - nferr = nf_def_dim(wr_ncid, 'nCells', nCells, wrDimIDnCells) - nferr = nf_def_dim(wr_ncid, 'nEdges', nEdges, wrDimIDnEdges) - nferr = nf_def_dim(wr_ncid, 'nVertices', nVertices, wrDimIDnVertices) - nferr = nf_def_dim(wr_ncid, 'maxEdges', maxEdges, wrDimIDmaxEdges) - nferr = nf_def_dim(wr_ncid, 'maxEdges2', 2*maxEdges, wrDimIDmaxEdges2) - nferr = nf_def_dim(wr_ncid, 'TWO', 2, wrDimIDTWO) - nferr = nf_def_dim(wr_ncid, 'vertexDegree', vertexDegree, wrDimIDvertexDegree) - nferr = nf_def_dim(wr_ncid, 'nVertLevels', nVertLevels, wrDimIDnVertLevels) - nferr = nf_def_dim(wr_ncid, 'Time', NF_UNLIMITED, wrDimIDTime) - - ! - ! Define variables - ! 
- dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'latCell', NF_DOUBLE, 1, dimlist, wrVarIDlatCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'lonCell', NF_DOUBLE, 1, dimlist, wrVarIDlonCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'meshDensity', NF_DOUBLE, 1, dimlist, wrVarIDmeshDensity) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'xCell', NF_DOUBLE, 1, dimlist, wrVarIDxCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'yCell', NF_DOUBLE, 1, dimlist, wrVarIDyCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'zCell', NF_DOUBLE, 1, dimlist, wrVarIDzCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'indexToCellID', NF_INT, 1, dimlist, wrVarIDindexToCellID) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'latEdge', NF_DOUBLE, 1, dimlist, wrVarIDlatEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'lonEdge', NF_DOUBLE, 1, dimlist, wrVarIDlonEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'xEdge', NF_DOUBLE, 1, dimlist, wrVarIDxEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'yEdge', NF_DOUBLE, 1, dimlist, wrVarIDyEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'zEdge', NF_DOUBLE, 1, dimlist, wrVarIDzEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'indexToEdgeID', NF_INT, 1, dimlist, wrVarIDindexToEdgeID) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'latVertex', NF_DOUBLE, 1, dimlist, wrVarIDlatVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'lonVertex', NF_DOUBLE, 1, dimlist, wrVarIDlonVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'xVertex', NF_DOUBLE, 1, dimlist, wrVarIDxVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'yVertex', NF_DOUBLE, 1, dimlist, wrVarIDyVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'zVertex', NF_DOUBLE, 1, dimlist, wrVarIDzVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'indexToVertexID', NF_INT, 1, dimlist, wrVarIDindexToVertexID) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'maxLevelCell', NF_INT, 1, dimlist, wrVarIDmaxLevelCell) - dimlist( 1) = wrDimIDTWO - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'cellsOnEdge', NF_INT, 2, dimlist, wrVarIDcellsOnEdge) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'nEdgesOnCell', NF_INT, 1, dimlist, wrVarIDnEdgesOnCell) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'nEdgesOnEdge', NF_INT, 1, dimlist, wrVarIDnEdgesOnEdge) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'edgesOnCell', NF_INT, 2, dimlist, wrVarIDedgesOnCell) - dimlist( 1) = wrDimIDmaxEdges2 - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'edgesOnEdge', NF_INT, 2, dimlist, wrVarIDedgesOnEdge) - dimlist( 1) = wrDimIDmaxEdges2 - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'weightsOnEdge', NF_DOUBLE, 2, dimlist, wrVarIDweightsOnEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dvEdge', NF_DOUBLE, 1, dimlist, wrVarIDdvEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dcEdge', NF_DOUBLE, 1, dimlist, wrVarIDdcEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'angleEdge', NF_DOUBLE, 1, dimlist, wrVarIDangleEdge) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'areaCell', NF_DOUBLE, 1, dimlist, wrVarIDareaCell) - dimlist( 1) = wrDimIDnVertices - 
nferr = nf_def_var(wr_ncid, 'areaTriangle', NF_DOUBLE, 1, dimlist, wrVarIDareaTriangle) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'cellsOnCell', NF_INT, 2, dimlist, wrVarIDcellsOnCell) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'verticesOnCell', NF_INT, 2, dimlist, wrVarIDverticesOnCell) - dimlist( 1) = wrDimIDTWO - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'verticesOnEdge', NF_INT, 2, dimlist, wrVarIDverticesOnEdge) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'edgesOnVertex', NF_INT, 2, dimlist, wrVarIDedgesOnVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'cellsOnVertex', NF_INT, 2, dimlist, wrVarIDcellsOnVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'kiteAreasOnVertex', NF_DOUBLE, 2, dimlist, wrVarIDkiteAreasOnVertex) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'fEdge', NF_DOUBLE, 1, dimlist, wrVarIDfEdge) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'fVertex', NF_DOUBLE, 1, dimlist, wrVarIDfVertex) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'fCell', NF_DOUBLE, 1, dimlist, wrVarIDfCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'bottomDepth', NF_DOUBLE, 1, dimlist, wrVarIDbottomDepth) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'temperatureRestore', NF_DOUBLE, 1, dimlist, wrVarIDtemperatureRestore) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'salinityRestore', NF_DOUBLE, 1, dimlist, wrVarIDsalinityRestore) - - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'boundaryLayerDepth', NF_DOUBLE, 1, dimlist, wrVarIDboundaryLayerDepth) - - dimlist( 1) = wrDimIDnVertLevels - nferr = nf_def_var(wr_ncid, 'refBottomDepth', NF_DOUBLE, 1, dimlist, wrVarIDrefBottomDepth) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'normalVelocity', NF_DOUBLE, 3, dimlist, wrVarIDnormalVelocity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'boundaryEdge', NF_INT, 2, dimlist, wrVarIDboundaryEdge) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'boundaryVertex', NF_INT, 2, dimlist, wrVarIDboundaryVertex) - - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'surfaceWindStress', NF_DOUBLE, 1, dimlist, wrVarIDsurfaceWindStress) - - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'surfaceWindStressZonal', NF_DOUBLE, 1, dimlist, wrVarIDsurfaceWindStressZonal) - - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'surfaceWindStressMeridional', NF_DOUBLE, 1, dimlist, wrVarIDsurfaceWindStressMeridional) - - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'layerThickness', NF_DOUBLE, 3, dimlist, & - wrVarIDlayerThickness) - ! dimlist( 1) = wrDimIDnVertLevels - ! dimlist( 2) = wrDimIDnCells - ! dimlist( 3) = wrDimIDTime - ! 
nferr = nf_def_var(wr_ncid, 'density', NF_DOUBLE, 3, dimlist, wrVarIDdensity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'temperature', NF_DOUBLE, 3, dimlist, wrVarIDtemperature) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'salinity', NF_DOUBLE, 3, dimlist, wrVarIDsalinity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - ! If you do not want tracer1 in your input file, simply comment out these two lines (one of two) - nferr = nf_def_var(wr_ncid, 'tracer1', NF_DOUBLE, 3, dimlist, wrVarIDtracer1) - - - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'on_a_sphere', 16, on_a_sphere) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'sphere_radius', NF_DOUBLE, 1, sphere_radius) - - nferr = nf_enddef(wr_ncid) - - end subroutine write_netcdf_init - - - subroutine write_netcdf_fields( & - time, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - maxLevelCell, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - fCell, & - bottomDepth, & - boundaryEdge, & - boundaryVertex, & - surfaceWindStress, & - surfaceWindStressZonal, & - surfaceWindStressMeridional, & - normalVelocity, & - layerThickness, & - density, & - temperature, & - salinity, & - tracer1, & - temperatureRestore, & - salinityRestore, & - boundaryLayerDepth, & - refBottomDepth & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: time - real (kind=8), dimension(:), intent(in) :: latCell - real (kind=8), dimension(:), intent(in) :: lonCell - real (kind=8), dimension(:), intent(in) :: meshDensity - real (kind=8), dimension(:), intent(in) :: xCell - real (kind=8), dimension(:), intent(in) :: yCell - real (kind=8), dimension(:), intent(in) :: zCell - integer, dimension(:), intent(in) :: indexToCellID - real (kind=8), dimension(:), intent(in) :: latEdge - real (kind=8), dimension(:), intent(in) :: lonEdge - real (kind=8), dimension(:), intent(in) :: xEdge - real (kind=8), dimension(:), intent(in) :: yEdge - real (kind=8), dimension(:), intent(in) :: zEdge - integer, dimension(:), intent(in) :: indexToEdgeID - real (kind=8), dimension(:), intent(in) :: latVertex - real (kind=8), dimension(:), intent(in) :: lonVertex - real (kind=8), dimension(:), intent(in) :: xVertex - real (kind=8), dimension(:), intent(in) :: yVertex - real (kind=8), dimension(:), intent(in) :: zVertex - integer, dimension(:), intent(in) :: indexToVertexID - integer, dimension(:), intent(in) :: maxLevelCell - integer, dimension(:,:), intent(in) :: cellsOnEdge - integer, dimension(:), intent(in) :: nEdgesOnCell - integer, dimension(:), intent(in) :: nEdgesOnEdge - integer, dimension(:,:), intent(in) :: edgesOnCell - integer, dimension(:,:), intent(in) :: edgesOnEdge - real (kind=8), dimension(:,:), intent(in) :: weightsOnEdge - real (kind=8), dimension(:), intent(in) :: dvEdge - real (kind=8), dimension(:), intent(in) :: dcEdge - real (kind=8), dimension(:), intent(in) :: angleEdge 
- real (kind=8), dimension(:), intent(in) :: areaCell - real (kind=8), dimension(:), intent(in) :: areaTriangle - integer, dimension(:,:), intent(in) :: cellsOnCell - integer, dimension(:,:), intent(in) :: verticesOnCell - integer, dimension(:,:), intent(in) :: verticesOnEdge - integer, dimension(:,:), intent(in) :: edgesOnVertex - integer, dimension(:,:), intent(in) :: cellsOnVertex - real (kind=8), dimension(:,:), intent(in) :: kiteAreasOnVertex - real (kind=8), dimension(:), intent(in) :: fEdge - real (kind=8), dimension(:), intent(in) :: fVertex - real (kind=8), dimension(:), intent(in) :: fCell - real (kind=8), dimension(:), intent(in) :: bottomDepth - integer, dimension(:,:), intent(in) :: boundaryEdge - integer, dimension(:,:), intent(in) :: boundaryVertex - real (kind=8), dimension(:), intent(in) :: surfaceWindStress - real (kind=8), dimension(:), intent(in) :: surfaceWindStressZonal - real (kind=8), dimension(:), intent(in) :: surfaceWindStressMeridional - real (kind=8), dimension(:,:,:), intent(in) :: normalVelocity - real (kind=8), dimension(:,:,:), intent(in) :: layerThickness - real (kind=8), dimension(:,:,:), intent(in) :: density - real (kind=8), dimension(:,:,:), intent(in) :: temperature - real (kind=8), dimension(:,:,:), intent(in) :: salinity - real (kind=8), dimension(:,:,:), intent(in) :: tracer1 - real (kind=8), dimension(:), intent(in) :: temperatureRestore - real (kind=8), dimension(:), intent(in) :: salinityRestore - real (kind=8), dimension(:), intent(in) :: boundaryLayerDepth - real (kind=8), dimension(:), intent(in) :: refBottomDepth - - - integer :: nferr - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - start1(1) = 1 - - start2(1) = 1 - start2(2) = 1 - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start4(1) = 1 - start4(2) = 1 - start4(3) = 1 - start4(4) = 1 - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatCell, start1, count1, latCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonCell, start1, count1, lonCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDmeshDensity, start1, count1, meshDensity) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDxCell, start1, count1, xCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDyCell, start1, count1, yCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDzCell, start1, count1, zCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToCellID, start1, count1, indexToCellID) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatEdge, start1, count1, latEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonEdge, start1, count1, lonEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDxEdge, start1, count1, xEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDyEdge, start1, count1, yEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDzEdge, start1, count1, zEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, 
wrVarIDindexToEdgeID, start1, count1, indexToEdgeID) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatVertex, start1, count1, latVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonVertex, start1, count1, lonVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDxVertex, start1, count1, xVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDyVertex, start1, count1, yVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDzVertex, start1, count1, zVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToVertexID, start1, count1, indexToVertexID) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDmaxLevelCell, start1, count1, maxLevelCell) - - start2(2) = 1 - count2( 1) = 2 - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnEdge, start2, count2, cellsOnEdge) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDnEdgesOnCell, start1, count1, nEdgesOnCell) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDnEdgesOnEdge, start1, count1, nEdgesOnEdge) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnCell, start2, count2, edgesOnCell) - - start2(2) = 1 - count2( 1) = 2*wrLocalmaxEdges - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnEdge, start2, count2, edgesOnEdge) - - start2(2) = 1 - count2( 1) = 2*wrLocalmaxEdges - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDweightsOnEdge, start2, count2, weightsOnEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdvEdge, start1, count1, dvEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdcEdge, start1, count1, dcEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDangleEdge, start1, count1, angleEdge) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDareaCell, start1, count1, areaCell) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDareaTriangle, start1, count1, areaTriangle) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnCell, start2, count2, cellsOnCell) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDverticesOnCell, start2, count2, verticesOnCell) - - start2(2) = 1 - count2( 1) = 2 - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDverticesOnEdge, start2, count2, verticesOnEdge) - - start2(2) = 1 - count2( 1) = wrLocalvertexDegree - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnVertex, start2, count2, edgesOnVertex) - - start2(2) = 1 - count2( 1) = wrLocalvertexDegree - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnVertex, start2, count2, cellsOnVertex) - - start2(2) = 1 - count2( 1) = wrLocalvertexDegree - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDkiteAreasOnVertex, start2, count2, kiteAreasOnVertex) - - 
start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDfEdge, start1, count1, fEdge) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDfVertex, start1, count1, fVertex) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDfCell, start1, count1, fCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDbottomDepth, start1, count1, bottomDepth) - - start2(2) = 1 - count2( 1) = wrLocalnVertLevels - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDboundaryEdge, start2, count2, boundaryEdge) - - start2(2) = 1 - count2( 1) = wrLocalnVertLevels - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDboundaryVertex, start2, count2, boundaryVertex) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDsurfaceWindStress, start1, count1, surfaceWindStress) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDsurfaceWindStressZonal, start1, count1, surfaceWindStressZonal) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDsurfaceWindStressMeridional, start1, count1, surfaceWindStressMeridional) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDtemperatureRestore, start1, count1, temperatureRestore) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDsalinityRestore, start1, count1, salinityRestore) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDboundaryLayerDepth, start1, count1, boundaryLayerDepth) - - start1(1) = 1 - count1( 1) = wrLocalnVertLevels - nferr = nf_put_vara_double(wr_ncid, wrVarIDrefBottomDepth, start1, count1, refBottomDepth) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDnormalVelocity, start3, count3, normalVelocity) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDlayerThickness, start3, count3, layerThickness) - - ! start3(3) = time - ! count3( 1) = wrLocalnVertLevels - ! count3( 2) = wrLocalnCells - ! count3( 3) = 1 - ! nferr = nf_put_vara_double(wr_ncid, wrVarIDdensity, start3, count3, density) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDtemperature, start3, count3, temperature) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDsalinity, start3, count3, salinity) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - ! 
If you do not want tracer1 in your input file, simply comment out these two lines (two of two) - nferr = nf_put_vara_double(wr_ncid, wrVarIDtracer1, start3, count3, tracer1) - - end subroutine write_netcdf_fields - - - subroutine write_netcdf_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(wr_ncid) - - end subroutine write_netcdf_finalize - -end module write_netcdf diff --git a/grid_gen/basin/src/utilities.F b/grid_gen/basin/src/utilities.F deleted file mode 100644 index a5a97a351..000000000 --- a/grid_gen/basin/src/utilities.F +++ /dev/null @@ -1,781 +0,0 @@ -module utilities - -contains - -subroutine write_OpenDX( on_a_sphere, & - nCells, & - nVertices, & - nEdges, & - vertexDegree, & - maxEdges, & - xCell, & - yCell, & - zCell, & - xVertex, & - yVertex, & - zVertex, & - xEdge, & - yEdge, & - zEdge, & - nEdgesOnCell, & - verticesOnCell, & - verticesOnEdge, & - cellsOnVertex, & - edgesOnCell, & - areaCell, & - maxLevelCell, & - meshSpacing, & - depthCell, & - SST, & - kiteAreasOnVertex ) - - implicit none - - character (len=16), intent(in) :: on_a_sphere - integer, intent(in) :: nCells, nVertices, vertexDegree, nEdges, maxEdges - real (kind=8), dimension(nCells), intent(inout) :: xCell - real (kind=8), dimension(nCells), intent(inout) :: yCell - real (kind=8), dimension(nCells), intent(inout) :: zCell - real (kind=8), dimension(nVertices), intent(inout) :: xVertex - real (kind=8), dimension(nVertices), intent(inout) :: yVertex - real (kind=8), dimension(nVertices), intent(inout) :: zVertex - real (kind=8), dimension(nEdges), intent(inout) :: xEdge - real (kind=8), dimension(nEdges), intent(inout) :: yEdge - real (kind=8), dimension(nEdges), intent(inout) :: zEdge - integer, dimension(nCells), intent(in) :: nEdgesOnCell - integer, dimension(maxEdges,nCells), intent(in) :: verticesOnCell - integer, dimension(maxEdges,nCells), intent(in) :: edgesOnCell - integer, dimension(2,nEdges), intent(in) :: verticesOnEdge - integer, dimension(vertexDegree, nVertices), intent(in) :: cellsOnVertex - integer, dimension(nCells), intent(in) :: maxLevelCell - real (kind=8), dimension(nCells), intent(in) :: areaCell - real (kind=8), dimension(nCells), intent(in) :: depthCell, SST, meshSpacing - real (kind=8), dimension(vertexDegree,nVertices), intent(in) :: kiteAreasOnVertex - - character(len=80) :: a, b, c, d, e, f - integer :: i, j, k, nVerticesTotal, iEdge, iLoop, iFace, Vert(4), Edge(4), iVertex, i1, i2, jp1 - integer :: nKitesTotal, iCell, iEdge1, iEdge2, iVertex11, iVertex12, iVertex21, iVertex22, ksave - real (kind=8) :: x1, x2, x3, x4, y1, y2, y3, y4, z1, z2, z3, z4, xscale, work(nCells), work1(nCells), work2(nCells) - real (kind=8) :: xv, yv, zv, xc, yc, zc, dist - logical (kind=8) :: eflag - - if(on_a_sphere.eq.'NO ') then - write(6,*) ' write_dx, not on a sphere ' - endif - - xscale = 1.00 - xCell = xCell*xscale - yCell = yCell*xscale - zCell = zCell*xscale - xVertex = xVertex*xscale - yVertex = yVertex*xscale - zVertex = zVertex*xscale - xEdge = xEdge*xscale - yEdge = yEdge*xscale - zEdge = zEdge*xscale - - write(6,*) 'xCell', minval(xCell), maxval(xCell) - write(6,*) ' nCells', nCells - write(6,*) ' nEdges', nEdges - write(6,*) ' nVertices', nVertices - write(6,*) ' nEdgesOnCell',minval(nEdgesOnCell), maxval(nEdgesOnCell) - - open(unit=1,file='dx/vector.dx',form='formatted',status='unknown') - - a = trim('object "positions list" class array type float rank 1 shape 3 items') - b = trim('ascii data file vector.position.data') - write(1,10) a, nCells 
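The write_netcdf module deleted above follows the standard netCDF define-then-write pattern: create ocean.nc with the 64-bit-offset flag, define every dimension and variable while in define mode, close define mode with nf_enddef, then write each array with the nf_put_vara_* routines. A minimal sketch of that pattern in Python, reduced to just two of the many fields (hypothetical; the netCDF4 package manages define mode implicitly):

    # Sketch of the define-then-write pattern used by the removed module,
    # reduced to a single dimension and two variables.
    from netCDF4 import Dataset

    def write_minimal_mesh(filename, latCell, bottomDepth):
        with Dataset(filename, "w", format="NETCDF3_64BIT_OFFSET") as nc:
            nc.createDimension("nCells", len(latCell))
            nc.createDimension("Time", None)      # unlimited, like NF_UNLIMITED
            lat_var = nc.createVariable("latCell", "f8", ("nCells",))
            depth_var = nc.createVariable("bottomDepth", "f8", ("nCells",))
            nc.on_a_sphere = "NO"                 # global attribute, as in the Fortran
            lat_var[:] = latCell
            depth_var[:] = bottomDepth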
- write(1,10) b - write(1,*) - - a = trim('object 0 class array type float rank 1 shape 3 items') - b = trim('ascii data file vector.data') - c = trim('attribute "dep" string "positions"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "vector" class field') - b = trim('component "positions" "positions list"') - c = trim('component "data" 0') - write(1,10) a - write(1,10) b - write(1,10) c - - close(1) - - open(unit=14,file='dx/vector.position.data',form='formatted',status='unknown') - do i=1,nCells - write(14,22) xCell(i), yCell(i), zCell(i) - enddo - close(14) - - - - nVerticesTotal = 0 - do i=1,nCells - nVerticesTotal = nVerticesTotal + nEdgesOnCell(i) - enddo - write(6,*) 'total number of vertices', nVerticesTotal - - open(unit=1,file='dx/ocean.dx',form='formatted',status='unknown') - - a = trim('object "positions list" class array type float rank 1 shape 3 items') - b = trim('ascii data file ocean.position.data') - write(1,10) a, nVerticesTotal - write(1,10) b - write(1,*) - 10 format(a70,i10) - - a = trim('object "edge list" class array type int rank 0 items') - b = trim('ascii data file ocean.edge.data') - c = trim('attribute "ref" string "positions"') - write(1,10) a, nVerticesTotal - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "loops list" class array type int rank 0 items') - b = trim('ascii data file ocean.loop.data') - c = trim('attribute "ref" string "edges"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "face list" class array type int rank 0 items') - b = trim('ascii data file ocean.face.data') - c = trim('attribute "ref" string "loops"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object 0 class array type float rank 0 items') - b = trim('data file ocean.meshSpacing.data') - c = trim('attribute "dep" string "faces"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "area" class field') - b = trim('component "positions" "positions list"') - c = trim('component "edges" "edge list"') - d = trim('component "loops" "loops list"') - e = trim('component "faces" "face list"') - f = trim('component "data" 0') - write(1,10) a - write(1,10) b - write(1,10) c - write(1,10) d - write(1,10) e - write(1,10) f - - close(1) - - - work2 = meshSpacing - work1 = depthCell - work = SST - - open(unit= 8,file='dx/ocean.meshSpacing.data',form='formatted',status='unknown') - open(unit= 9,file='dx/ocean.depth.data',form='formatted',status='unknown') - open(unit=10,file='dx/ocean.area.data',form='formatted',status='unknown') - open(unit=11,file='dx/ocean.face.data',form='formatted',status='unknown') - open(unit=12,file='dx/ocean.loop.data',form='formatted',status='unknown') - open(unit=13,file='dx/ocean.edge.data',form='formatted',status='unknown') - open(unit=14,file='dx/ocean.position.data',form='formatted',status='unknown') - - iLoop = 0 - iEdge = 0 - do i=1,nCells - write(8,20) work2(i) - write(9,20) work1(i) - write(10,20) work(i) - write(11,21) i-1 - write(12,21) iLoop - iLoop = iLoop + nEdgesOnCell(i) - - eflag = .false. - do j=1,nEdgesOnCell(i) - k = verticesOnCell(j,i) - xv = xVertex(k); yv = yVertex(k); zv = zVertex(k) - xc = xCell(i); yc = yCell(i); zc = zCell(i) - dist = sqrt( (xc-xv)**2 + (yc-yv)**2 + (zc-zv)**2 ) - if(dist.gt.5.0e5.and.on_a_sphere.eq.'NO ') then - eflag = .true. 
- endif - enddo - - if(eflag) then - - do j=1,nEdgesOnCell(i) - write(13,21) iEdge - iEdge = iEdge + 1 - k = verticesOnCell(j,i) - xv = xVertex(k); yv = yVertex(k); zv = zVertex(k) - xc = xCell(i); yc = yCell(i); zc = zCell(i) - dist = sqrt( (xc-xv)**2 + (yc-yv)**2 + (zc-zv)**2 ) - if(dist.gt.5.0e5) then - write(14,22) xc, yc, zc - else - write(14,22) xv, yv, zv - endif - enddo - - else - - do j=1,nEdgesOnCell(i) - write(13,21) iEdge - iEdge = iEdge + 1 - k = verticesOnCell(j,i) - if(k.le.0) write(6,*) ' vert1 ',k, verticesOnCell(:,i) - write(14,22) xVertex(k), yVertex(k), zVertex(k) - write(15,23) j,i,k,xVertex(k), yVertex(k), zVertex(k) - enddo - endif - enddo - - 20 format(e20.10) - 21 format(i20) - 22 format(3e20.10) - 23 format(3i8, 3e20.10) - - close(9) - close(10) - close(11) - close(12) - close(13) - close(14) - - ! nVerticesTotal = 0 - ! nKitesTotal = 0 - ! do i=1,nCells - ! nKitesTotal = nKitesTotal + nEdgesOnCell(i) - ! enddo - ! nVerticesTotal = nKitesTotal*4 - ! write(6,*) nKitesTotal, nVerticesTotal - - ! open(unit=1,file='dx/kite.dx',form='formatted',status='unknown') - - ! a = trim('object "positions list" class array type float rank 1 shape 3 items') - ! b = trim('ascii data file kite.position.data') - ! write(1,10) a, nVerticesTotal - ! write(1,10) b - ! write(1,*) - - ! a = trim('object "edge list" class array type int rank 0 items') - ! b = trim('ascii data file kite.edge.data') - ! c = trim('attribute "ref" string "positions"') - ! write(1,10) a, nVerticesTotal - ! write(1,10) b - ! write(1,10) c - ! write(1,*) - - ! a = trim('object "loops list" class array type int rank 0 items') - ! b = trim('ascii data file kite.loop.data') - ! c = trim('attribute "ref" string "edges"') - ! write(1,10) a, nKitesTotal - ! write(1,10) b - ! write(1,10) c - ! write(1,*) - - ! a = trim('object "face list" class array type int rank 0 items') - ! b = trim('ascii data file kite.face.data') - ! c = trim('attribute "ref" string "loops"') - ! write(1,10) a, nKitesTotal - ! write(1,10) b - ! write(1,10) c - ! write(1,*) - - ! a = trim('object 0 class array type float rank 0 items') - ! b = trim('data file kite.area.data') - ! c = trim('attribute "dep" string "faces"') - ! write(1,10) a, nKitesTotal - ! write(1,10) b - ! write(1,10) c - ! write(1,*) - - ! a = trim('object "area" class field') - ! b = trim('component "positions" "positions list"') - ! c = trim('component "edges" "edge list"') - ! d = trim('component "loops" "loops list"') - ! e = trim('component "faces" "face list"') - ! f = trim('component "data" 0') - ! write(1,10) a - ! write(1,10) b - ! write(1,10) c - ! write(1,10) d - ! write(1,10) e - ! write(1,10) f - - ! close(1) - - ! open(unit=10,file='dx/kite.area.data',form='formatted',status='unknown') - ! open(unit=11,file='dx/kite.face.data',form='formatted',status='unknown') - ! open(unit=12,file='dx/kite.loop.data',form='formatted',status='unknown') - ! open(unit=13,file='dx/kite.edge.data',form='formatted',status='unknown') - ! open(unit=14,file='dx/kite.position.data',form='formatted',status='unknown') - - ! iLoop = 0 - ! iEdge = 0 - ! iFace = 0 - - ! do iCell=1,nCells - ! do j=1,nEdgesOnCell(iCell) - ! iEdge1 = edgesOnCell(j,iCell) - ! jp1 = j+1 - ! if(j.eq.nEdgesOnCell(iCell)) jp1=1 - ! iEdge2 = edgesOnCell(jp1,iCell) - - ! iVertex11 = verticesOnEdge(1,iEdge1) - ! iVertex21 = verticesOnEdge(2,iEdge1) - ! iVertex12 = verticesOnEdge(1,iEdge2) - ! ivertex22 = verticesOnEdge(2,iEdge2) - - ! if(iVertex11.eq.iVertex12.or.iVertex11.eq.iVertex22) then - ! iVertex = iVertex11 - ! 
elseif(iVertex21.eq.iVertex12.or.iVertex21.eq.iVertex22) then - ! iVertex = iVertex21 - ! else - ! write(6,*) iVertex11, iVertex21, iVertex12, iVertex22 - ! stop - ! endif - - ! ksave = 0 - ! do k=1,vertexDegree - ! if(cellsOnVertex(k,iVertex).eq.iCell) ksave=k - ! enddo - ! if(ksave.eq.0) then - ! write(6,*) ' can not find iCell' - ! write(6,*) cellsOnVertex(:,iVertex) - ! write(6,*) iCell - ! write(6,*) iEdge1, iEdge2 - ! write(6,*) iVertex11, iVertex21, iVertex21, iVertex22 - ! write(6,*) iVertex - ! stop - ! endif - - ! write(11,21) iFace - ! write(12,21) iLoop - ! iFace = iFace + 1 - ! iLoop = iLoop + 4 - ! do k=1,4 - ! write(13,21) iEdge - ! iEdge = iEdge + 1 - ! enddo - ! - ! x1 = xCell(iCell) ; y1 = yCell(iCell) ; z1 = zCell(iCell) - ! x2 = xEdge(iEdge1) ; y2 = yEdge(iEdge1) ; z2 = zEdge(iEdge1) - ! x3 = xVertex(iVertex); y3 = yVertex(iVertex); z3 = zVertex(iVertex) - ! x4 = xEdge(iEdge2) ; y4 = yEdge(iEdge2) ; z4 = zEdge(iEdge2) - ! - ! write(14,22) x1, y1, z1 - ! write(14,22) x2, y2, z2 - ! write(14,22) x3, y3, z3 - ! write(14,22) x4, y4, z4 - ! write(10,22) kiteAreasOnVertex(ksave,iVertex) - - ! enddo - ! enddo - -end subroutine write_OpenDX - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CONVERT_LX -! -! Convert (lat,lon) to an (x, y, z) location on a sphere with specified radius. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine convert_lx(x, y, z, radius, lat, lon) - - implicit none - - real, intent(in) :: radius - real, intent(in) :: lat, lon - real, intent(out) :: x, y, z - - z = radius * sin(lat) - x = radius * cos(lon) * cos(lat) - y = radius * sin(lon) * cos(lat) - -end subroutine convert_lx - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CONVERT_XL -! -! Convert (x, y, z) to a (lat, lon) location on a sphere with -! radius sqrt(x^2 + y^2 + z^2). -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine convert_xl(x, y, z, lat,lon) - - implicit none - - real, intent(in) :: x, y, z - real, intent(out) :: lat, lon - - real :: dl, clat, pii, rtod - real :: eps - parameter (eps=1.e-10) - - pii = 2.*asin(1.0) - rtod=180./pii - dl = sqrt(x*x + y*y + z*z) - - lat = asin(z/dl) - -! check for being close to either pole - - if (abs(x) > eps) then - - if (abs(y) > eps) then - - lon = atan(abs(y/x)) - - if ((x <= 0.) .and. (y >= 0.)) then - lon = pii-lon - else if ((x <= 0.) .and. (y < 0.)) then - lon = lon+pii - else if ((x >= 0.) .and. (y <= 0.)) then - lon = 2*pii-lon - end if - - else ! we're either on longitude 0 or 180 - - if (x > 0) then - lon = 0. - else - lon = pii - end if - - end if - - else if (abs(y) > eps) then - - if (y > 0) then - lon = pii/2. - else - lon = 3.*pii/2. - end if - - else ! we are at a pole - - lon = 0. - - end if - -end subroutine convert_xl - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine transform_from_lonlat_to_xyz(xin, yin, zin, ulon, ulat, ux, uy, uz) -! -! transform vector measured in latitude/longitude space to a vector measured in x,y,z -! -! INTENT(IN) -! xin = x position -! yin = y position -! zin = z position -! ulon = east component of vector -! ulat = north component of vector -! -! INTENT(OUT) -! ux = x component of vector -! uy = y component of vector -! uz = z component of vector -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- -implicit none -real, intent(in) :: xin, yin, zin, ulon, ulat -real, intent(out) :: ux, uy, uz -real :: h(3,3), p(3), q(3), g(3), X1(3,3), X2(3,3), trans_X2_to_X1(3,3), r -integer :: i,j,k -logical :: l_Pole -real, parameter :: epsvt = 1.0e-10 - -!----------------------------------------------------------------------- -! define the e1, e2, and e3 directions -!----------------------------------------------------------------------- - X1(1,1) = 1.0; X1(1,2) = 0.0; X1(1,3) = 0.0 - X1(2,1) = 0.0; X1(2,2) = 1.0; X1(2,3) = 0.0 - X1(3,1) = 0.0; X1(3,2) = 0.0; X1(3,3) = 1.0 - -!----------------------------------------------------------------------- -! find the vectors (measured in X1) that point in the local -! east (h(1,:)), north (h(2,:)), and vertical (h(3,:)) direction -!----------------------------------------------------------------------- - h(3,1) = xin; h(3,2) = yin; h(3,3) = zin - call unit_vector_in_3space(h(3,:)) - -!----------------------------------------------------------------------- -! g(:) is a work array and holds the vector pointing to the North Pole. -! measured in X1 -!----------------------------------------------------------------------- - g(:) = X1(3,:) - -!----------------------------------------------------------------------- -! determine if the local vertical hits a pole -!----------------------------------------------------------------------- - l_Pole = .false. - r = g(1)*h(3,1) + g(2)*h(3,2) + g(3)*h(3,3) - r = abs(r) + epsvt - if(r.gt.1.0) then - l_Pole = .true. - h(3,:) = h(3,:) + epsvt - call unit_vector_in_3space(h(3,:)) - endif - -!----------------------------------------------------------------------- -! find the vector that is perpendicular to the local vertical vector -! and points in the direction of of the North pole, this defines the local -! north direction. measured in X1 -!----------------------------------------------------------------------- - call vector_on_tangent_plane ( h(3,:), g(:), h(2,:) ) - -!----------------------------------------------------------------------- -! take the cross product of the local North direction and the local vertical -! to find the local east vector. still in X1 -!----------------------------------------------------------------------- - call cross_product_in_3space ( h(2,:), h(3,:), h(1,:) ) - -!----------------------------------------------------------------------- -! put these 3 vectors into a matrix X2 -!----------------------------------------------------------------------- - X2(1,:) = h(1,:) ! local east (measured in X1) - X2(2,:) = h(2,:) ! local north (measured in X1) - X2(3,:) = h(3,:) ! local vertical (measured in X1) - -!----------------------------------------------------------------------- -! compute the transformation matrix -!----------------------------------------------------------------------- - trans_X2_to_X1(:,:) = matmul(X1,transpose(X2)) - -!----------------------------------------------------------------------- -! transform (ulon, ulat) into (x,y,z) -!----------------------------------------------------------------------- - p(1) = ulon; p(2) = ulat; p(3) = 0 - g(:) = matmul(trans_X2_to_X1(:, :), p(:)) - ux = g(1); uy = g(2); uz = g(3) - -end subroutine transform_from_lonlat_to_xyz - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine transform_from_xyz_to_lonlat(xin, yin, zin, ux, uy, uz, ulon, ulat) -! -! transform vector measured in x,y,z space to a vector measured in latitude/longitude space -! -! INTENT(IN) -! xin = x position -! yin = y position -! 
zin = z position -! ux = x component of vector -! uy = y component of vector -! uz = z component of vector -! -! INTENT(OUT) -! ulon = east component of vector -! ulat = north component of vector -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - -implicit none -real, intent(in) :: xin, yin, zin, ux, uy, uz -real, intent(out) :: ulon, ulat -real :: h(3,3), p(3), q(3), g(3), X1(3,3), X2(3,3), trans_X1_to_X2(3,3), r -integer :: i,j,k -logical :: l_Pole -real, parameter :: epsvt = 1.0e-10 - -!----------------------------------------------------------------------- -! define the e1, e2, and e3 directions -!----------------------------------------------------------------------- - X1(1,1) = 1.0; X1(1,2) = 0.0; X1(1,3) = 0.0 - X1(2,1) = 0.0; X1(2,2) = 1.0; X1(2,3) = 0.0 - X1(3,1) = 0.0; X1(3,2) = 0.0; X1(3,3) = 1.0 - -!----------------------------------------------------------------------- -! find the vectors (measured in X1) that point in the local -! east (h(1,:)), north (h(2,:)), and vertical (h(3,:)) direction -!----------------------------------------------------------------------- - h(3,1) = xin; h(3,2) = yin; h(3,3) = zin - call unit_vector_in_3space(h(3,:)) - -!----------------------------------------------------------------------- -! g(:) is a work array and holds the vector pointing to the North Pole. -! measured in X1 -!----------------------------------------------------------------------- - g(:) = X1(3,:) - -!----------------------------------------------------------------------- -! determine if the local vertical hits a pole -!----------------------------------------------------------------------- - l_Pole = .false. - r = g(1)*h(3,1) + g(2)*h(3,2) + g(3)*h(3,3) - r = abs(r) + epsvt - if(r.gt.1.0) then - l_Pole = .true. - h(3,:) = h(3,:) + epsvt - call unit_vector_in_3space(h(3,:)) - endif - -!----------------------------------------------------------------------- -! find the vector that is perpendicular to the local vertical vector -! and points in the direction of of the North pole, this defines the local -! north direction. measured in X1 -!----------------------------------------------------------------------- - call vector_on_tangent_plane ( h(3,:), g(:), h(2,:) ) - -!----------------------------------------------------------------------- -! take the cross product of the local North direction and the local vertical -! to find the local east vector. still in X1 -!----------------------------------------------------------------------- - call cross_product_in_3space ( h(2,:), h(3,:), h(1,:) ) - -!----------------------------------------------------------------------- -! put these 3 vectors into a matrix X2 -!----------------------------------------------------------------------- - X2(1,:) = h(1,:) ! local east (measured in X1) - X2(2,:) = h(2,:) ! local north (measured in X1) - X2(3,:) = h(3,:) ! local vertical (measured in X1) - -!----------------------------------------------------------------------- -! compute the transformation matrix -!----------------------------------------------------------------------- - trans_X1_to_X2(:,:) = matmul(X2,transpose(X1)) - -!----------------------------------------------------------------------- -! 
transform (ulon, ulat) into (x,y,z) -!----------------------------------------------------------------------- - p(1) = ux; p(2) = uy; p(3) = uz - g(:) = matmul(trans_X1_to_X2(:, :), p(:)) - ulon = g(1); ulat= g(2); - -end subroutine transform_from_xyz_to_lonlat - -!====================================================================== -! BEGINNING OF UNIT_VECTOR_IN_3SPACE -!====================================================================== - subroutine unit_vector_in_3space (p_1) - -!----------------------------------------------------------------------- -! PURPOSE : normalize p_1 to unit length and overwrite p_1 -!----------------------------------------------------------------------- - -!----------------------------------------------------------------------- -! intent(inout) -!----------------------------------------------------------------------- - real , intent(inout) :: & - p_1 (:) - -!----------------------------------------------------------------------- -! local -!----------------------------------------------------------------------- - real :: length - - length = SQRT (p_1(1)**2 + p_1(2)**2 + p_1(3)**2 ) - length = 1.0/length - p_1(1) = p_1(1)*length - p_1(2) = p_1(2)*length - p_1(3) = p_1(3)*length - - end subroutine unit_vector_in_3space -!====================================================================== -! END OF UNIT_VECTOR_IN_3SPACE -!====================================================================== - -!====================================================================== -! BEGINNING OF CROSS_PRODUCT_IN_3SPACE -!====================================================================== - subroutine cross_product_in_3space(p_1,p_2,p_out) - -!----------------------------------------------------------------------- -! PURPOSE: compute p_1 cross p_2 and place in p_out -!----------------------------------------------------------------------- - -!----------------------------------------------------------------------- -! intent(in) -!----------------------------------------------------------------------- - real , intent(in) :: & - p_1 (:), & - p_2 (:) - -!----------------------------------------------------------------------- -! intent(out) -!----------------------------------------------------------------------- - real , intent(out) :: & - p_out (:) - - p_out(1) = p_1(2)*p_2(3)-p_1(3)*p_2(2) - p_out(2) = p_1(3)*p_2(1)-p_1(1)*p_2(3) - p_out(3) = p_1(1)*p_2(2)-p_1(2)*p_2(1) - - end subroutine cross_product_in_3space -!====================================================================== -! END OF CROSS_PRODUCT_IN_3SPACE -!====================================================================== - -!====================================================================== -! BEGINNING OF VECTOR_ON_TANGENT_PLANE -!====================================================================== - subroutine vector_on_tangent_plane(p_1, p_2, p_out) - -!----------------------------------------------------------------------- -! PURPOSE : given two points measured in (x,y,z) and lying on -! the unit sphere, find the vector (p_out) that lies on the plane -! perpendicular to the p_1 vector and points in the direction of -! the projection of p_2 onto the tangent plane. -! -! NOTE : p_1 and p_2 are assumed to be of unit length -! NOTE : p_out is normalized to unit length -!----------------------------------------------------------------------- - -!----------------------------------------------------------------------- -! 
intent(in) -!----------------------------------------------------------------------- - real , intent(in) :: & - p_1 (:), & - p_2 (:) - -!----------------------------------------------------------------------- -! intent(out) -!----------------------------------------------------------------------- - real , intent(out) :: & - p_out (:) - -!----------------------------------------------------------------------- -! local -!----------------------------------------------------------------------- - real :: & - work (3), t1(3), t2(3) - -! work (1) = - p_1(2) * ( -p_1(2) * p_2(1) + p_1(1) * p_2(2) ) & -! + p_1(3) * ( p_1(3) * p_2(1) - p_1(1) * p_2(3) ) - -! work (2) = + p_1(1) * ( -p_1(2) * p_2(1) + p_1(1) * p_2(2) ) & -! - p_1(3) * ( -p_1(3) * p_2(2) + p_1(2) * p_2(3) ) - -! work (3) = - p_1(1) * ( p_1(3) * p_2(1) - p_1(1) * p_2(3) ) & -! + p_1(2) * ( -p_1(3) * p_2(2) + p_1(2) * p_2(3) ) - - - t1(:) = p_2(:) - p_1(:) - t2(:) = p_1 - - call unit_vector_in_3space (t1) - call unit_vector_in_3space (t2) - - call cross_product_in_3space(t1(:), t2(:), work(:)) - call unit_vector_in_3space (work) - call cross_product_in_3space(t2(:),work(:),p_out(:)) - call unit_vector_in_3space (p_out) - - end subroutine vector_on_tangent_plane -!====================================================================== -! END OF VECTOR_ON_TANGENT_PLANE -!====================================================================== - -end module utilities diff --git a/grid_gen/global_scvt/INSTALL b/grid_gen/global_scvt/INSTALL deleted file mode 100644 index 84c621a0c..000000000 --- a/grid_gen/global_scvt/INSTALL +++ /dev/null @@ -1,6 +0,0 @@ -1) Edit the Makefile to set appropriate compilers and compiler flags. - The main loop of the Lloyd iteration code is parallelized with OpenMP, - so if desired, OpenMP flags may be specified in FFLAGS and LDFLAGS. - -2) Run 'make' to create an executable, grid_gen, in the src directory, as - well as a symbolic link to the executable in the top-level directory. diff --git a/grid_gen/global_scvt/Makefile b/grid_gen/global_scvt/Makefile deleted file mode 100644 index aaef05ca9..000000000 --- a/grid_gen/global_scvt/Makefile +++ /dev/null @@ -1,35 +0,0 @@ -#FC = ifort -#FFLAGS = -FR -m64 -O3 -fast -ipo -openmp -#F77FLAGS = -FI -m64 -O3 -fast -ipo -openmp -#CPPFLAGS = -DRKIND=8 -#PROMOTION = -r8 -#LDFLAGS = -m64 -O3 -fast -ipo -openmp - -FC = gfortran -FFLAGS = -ffree-form -O3 -fopenmp -ffree-line-length-none -F77FLAGS = -ffixed-form -O3 -fopenmp -fsecond-underscore -CPPFLAGS = -DRKIND=8 -PROMOTION = -fdefault-real-8 -LDFLAGS = -O3 -fopenmp - -#FC = pgf90 -#FFLAGS = -Mfree -O3 -mp -byteswapio -#F77FLAGS = -O3 -byteswapio -#CPPFLAGS = -DRKIND=8 -#PROMOTION = -r8 -#LDFLAGS = -O3 -mp -byteswapio - -all: grid_gen grid_ref - -grid_gen: - ( cd src; make FC="$(FC)" FFLAGS="$(FFLAGS)" F77FLAGS="$(F77FLAGS)" CPPFLAGS="$(CPPFLAGS)" PROMOTION="$(PROMOTION)" LDFLAGS="$(LDFLAGS)" ) - if [ ! -e grid_gen ]; then ln -s src/grid_gen .; fi - -grid_ref: - $(FC) refine/grid_ref.f refine/svtgen.f -o refine/grid_ref - if [ ! -e grid_ref ]; then ln -s refine/grid_ref .; fi - -clean: - ( cd src; make clean ) - rm -f grid_gen grid_ref refine/grid_ref - diff --git a/grid_gen/global_scvt/README b/grid_gen/global_scvt/README deleted file mode 100644 index 518827a7b..000000000 --- a/grid_gen/global_scvt/README +++ /dev/null @@ -1,52 +0,0 @@ -The grid_gen utility converts a set of generating points into a complete MPAS -input file, grid.nc. 
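As the README text that follows explains, grid_gen reads its generating points from locs.dat, either as (lat, lon) pairs or as (x, y, z) coordinates on the unit sphere. A small illustrative helper (not part of the deleted tool) that writes the (lat, lon) form described below, one '(f13.10,1x,f13.10)' pair per line:

    # Illustrative sketch: write generating points in the (lat, lon) locs.dat
    # form described in the README below ('(f13.10,1x,f13.10)' per line).
    # Units are whatever grid_gen expects; they are not restated here.
    def write_latlon_locs(points, filename="locs.dat"):
        with open(filename, "w") as f:
            for lat, lon in points:
                f.write("%13.10f %13.10f\n" % (lat, lon))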
Additionally, grid_gen creates a graph description file -suitable for use with METIS, plots of the initial Delaunay triangulation and -final Delaunay triangulation (after any further adjustment by grid_gen), and a -list of the final generating points. - - - -Input files: - - namelist.input - a Fortran namelist with the following variables - np - the number of generating points in the locs.dat input file - locs_as_xyz - whether the generating points are given as (x,y,z) - coordinates or as (lat,lon) coordinates - n_scvt_iterations - the number of Lloyd iterations to perform, - beginning with the generating points in the locs.dat file, - using the density function specified programmatically in - src/module_scvt.F - - locs.dat - a list of generating points, either given as (lat,lon) - coordinates or as (x,y,z) coordinates; the coorinates used in the - file should be indicated to grid_gen using the locs_as_xyz logical - value in the namelist.input file. - - The format of the file for (x,y,z) coordinates has a header line - giving the number of generating points and the maximum degree of any - node in the Delaunay triangulation, followed by np lines with the - format '(10x,f22.10,f23.10,f23.10)' giving the x, y, and z - coordinates (on the unit sphere) of each generating point. - - The format of the file for (lat,lon) coordinates has np lines with - the format '(f13.10,1x,f13.10)' giving the latitude and longitude - coorinates of each generating point. - - - -Output files: - - scvt_initial.ps - a plot of the Delaunay triangulation of the generating - points specified in the locs.dat file - - scvt_final.ps - a plot of the Delaunay triangulation of the final - generating points, after adjustment by the number of Lloyd iterations - specified in the namelist.input file - - locs.dat.out - a list of the final generating points, after adjustment by - the number of Lloyd iterations specified in the namelist.input file - - graph.info - a description of the output mesh in a format suitable for use - with METIS to produce graph decomposition files for MPAS - - grid.nc - an MPAS input file diff --git a/grid_gen/global_scvt/centroids.162.dat b/grid_gen/global_scvt/centroids.162.dat deleted file mode 100644 index 90f85cf58..000000000 --- a/grid_gen/global_scvt/centroids.162.dat +++ /dev/null @@ -1,163 +0,0 @@ - 162 162 - 1 -0.8909593038 -0.0786862054 0.4472135955 - 2 -0.2004865378 -0.8716680264 0.4472135955 - 3 0.0000000000 0.0000000000 1.0000000000 - 4 0.8909593038 0.0786862054 -0.4472135955 - 5 0.2004865378 0.8716680264 -0.4472135955 - 6 0.3501565946 -0.8230372770 -0.4472135955 - 7 0.7670518092 -0.4600342618 0.4472135955 - 8 -0.7670518092 0.4600342618 -0.4472135955 - 9 0.0000000000 0.0000000000 -1.0000000000 - 10 0.6745506270 0.5873512166 0.4472135955 - 11 -0.6745506270 -0.5873512166 -0.4472135955 - 12 -0.3501565946 0.8230372770 0.4472135955 - 13 -0.7295096215 0.4375185824 0.5257311121 - 14 -0.9745544804 0.2241507635 0.0000000000 - 15 -0.9201836497 -0.3914869741 0.0000000000 - 16 -0.6415357694 -0.5586042019 0.5257311121 - 17 -0.5236927392 -0.0462505911 0.8506508084 - 18 -0.5143339407 -0.8575899938 0.0000000000 - 19 0.0879738521 -0.9961227843 0.0000000000 - 20 0.3330187110 -0.7827549654 0.5257311121 - 21 -0.1178430302 -0.5123536108 0.8506508084 - 22 0.3964909105 0.3452363830 0.8506508084 - 23 -0.2058168823 0.4837691735 0.8506508084 - 24 0.4508617412 -0.2704013546 0.8506508084 - 25 0.9201836497 0.3914869741 0.0000000000 - 26 0.9745544804 -0.2241507635 0.0000000000 - 27 0.7295096215 -0.4375185824 
-0.5257311121 - 28 0.5236927392 0.0462505911 -0.8506508084 - 29 0.6415357694 0.5586042019 -0.5257311121 - 30 0.5143339407 0.8575899938 0.0000000000 - 31 0.1178430302 0.5123536108 -0.8506508084 - 32 -0.3330187110 0.7827549654 -0.5257311121 - 33 -0.0879738521 0.9961227843 0.0000000000 - 34 -0.1906740282 -0.8290055565 -0.5257311121 - 35 0.2058168823 -0.4837691735 -0.8506508084 - 36 0.6566786235 -0.7541705281 0.0000000000 - 37 0.8473526517 0.0748350284 0.5257311121 - 38 -0.4508617412 0.2704013546 -0.8506508084 - 39 -0.8473526517 -0.0748350284 -0.5257311121 - 40 -0.6566786235 0.7541705281 0.0000000000 - 41 -0.3964909105 -0.3452363830 -0.8506508084 - 42 0.1906740282 0.8290055565 0.5257311121 - 43 -0.8434663484 0.1827424714 0.5051432551 - 44 -0.9690454298 0.0733970446 0.2357198099 - 45 -0.9411818011 -0.2421011948 0.2357198099 - 46 -0.7983820501 -0.3277444034 0.5051432551 - 47 -0.7379905791 -0.0651765777 0.6716561016 - 48 -0.1660650217 -0.7220114194 0.6716561016 - 49 -0.4344438541 -0.7457136377 0.5051432551 - 50 -0.3692562437 -0.8989360365 0.2357198099 - 51 -0.0605892524 -0.9699304685 0.2357198099 - 52 0.0649898290 -0.8605850417 0.5051432551 - 53 0.2310548507 -0.1385736223 0.9630218103 - 54 0.2031912220 0.1769246171 0.9630218103 - 55 -0.1054757693 0.2479190491 0.9630218103 - 56 -0.2683788324 -0.0237022183 0.9630218103 - 57 -0.0603914710 -0.2625678257 0.9630218103 - 58 0.7983820501 0.3277444034 -0.5051432551 - 59 0.9411818011 0.2421011948 -0.2357198099 - 60 0.9690454298 -0.0733970446 -0.2357198099 - 61 0.8434663484 -0.1827424714 -0.5051432551 - 62 0.7379905791 0.0651765777 -0.6716561016 - 63 0.3692562437 0.8989360365 -0.2357198099 - 64 0.4344438541 0.7457136377 -0.5051432551 - 65 0.1660650217 0.7220114194 -0.6716561016 - 66 -0.0649898290 0.8605850417 -0.5051432551 - 67 0.0605892524 0.9699304685 -0.2357198099 - 68 0.0868470176 -0.8586546962 -0.5051432551 - 69 0.2900382396 -0.6817300791 -0.6716561016 - 70 0.5584170720 -0.6580278608 -0.5051432551 - 71 0.5210930903 -0.8203037014 -0.2357198099 - 72 0.2296467686 -0.9442979047 -0.2357198099 - 73 0.7408325207 -0.6289700689 0.2357198099 - 74 0.9037355838 -0.3573488014 0.2357198099 - 75 0.8385479733 -0.2041264026 0.5051432551 - 76 0.6353567514 -0.3810510197 0.6716561016 - 77 0.5749652804 -0.6436188454 0.5051432551 - 78 -0.5749652804 0.6436188454 -0.5051432551 - 79 -0.6353567514 0.3810510197 -0.6716561016 - 80 -0.8385479733 0.2041264026 -0.5051432551 - 81 -0.9037355838 0.3573488014 -0.2357198099 - 82 -0.7408325207 0.6289700689 -0.2357198099 - 83 0.0603914710 0.2625678257 -0.9630218103 - 84 0.2683788324 0.0237022183 -0.9630218103 - 85 0.1054757693 -0.2479190491 -0.9630218103 - 86 -0.2031912220 -0.1769246171 -0.9630218103 - 87 -0.2310548507 0.1385736223 -0.9630218103 - 88 0.4532613197 0.7344279869 0.5051432551 - 89 0.5587370890 0.4865089377 0.6716561016 - 90 0.7897919397 0.3479353154 0.5051432551 - 91 0.8271159214 0.5102111560 0.2357198099 - 92 0.6191285600 0.7490767634 0.2357198099 - 93 -0.6191285600 -0.7490767634 -0.2357198099 - 94 -0.8271159214 -0.5102111560 -0.2357198099 - 95 -0.7897919397 -0.3479353154 -0.5051432551 - 96 -0.5587370890 -0.4865089377 -0.6716561016 - 97 -0.4532613197 -0.7344279869 -0.5051432551 - 98 -0.2296467686 0.9442979047 0.2357198099 - 99 -0.5210930903 0.8203037014 0.2357198099 - 100 -0.5584170720 0.6580278608 0.5051432551 - 101 -0.2900382396 0.6817300791 0.6716561016 - 102 -0.0868470176 0.8586546962 0.5051432551 - 103 -0.7389477008 0.6174931387 0.2695524424 - 104 -0.8927233180 0.3610907895 0.2695524424 - 105 -0.6485012287 0.2146202028 
0.7303316540 - 106 -0.4947256115 0.4710225519 0.7303316540 - 107 -0.9607746082 0.0652198824 -0.2695524424 - 108 -0.9960073301 -0.0879636556 -0.0152247717 - 109 -0.8574905961 0.5142743275 0.0152247717 - 110 -0.7540831171 -0.6566025121 0.0152247717 - 111 -0.8156182708 -0.5119651523 0.2695524424 - 112 -0.9344721764 -0.2326010154 -0.2695524424 - 113 -0.4045138430 -0.5504400294 0.7303316540 - 114 -0.6008476888 -0.3249580433 0.7303316540 - 115 -0.6192844249 -0.7374471383 0.2695524424 - 116 -0.3951596413 0.2369943877 0.8875147988 - 117 -0.3475061014 -0.3025838584 0.8875147988 - 118 -0.3589234758 -0.8935968998 -0.2695524424 - 119 -0.2241247837 -0.9744415260 -0.0152247717 - 120 -0.0675510718 -0.9606135193 -0.2695524424 - 121 0.3914415994 -0.9200769971 0.0152247717 - 122 0.2348678876 -0.9339050038 0.2695524424 - 123 0.5099844947 -0.8168581860 0.2695524424 - 124 0.3984979249 -0.5548108497 0.7303316540 - 125 0.1233813177 -0.6718576676 0.7303316540 - 126 0.1803890593 -0.4240014966 0.8875147988 - 127 0.4589926712 0.0405365222 0.8875147988 - 128 0.6507991050 0.2075480669 0.7303316540 - 129 0.2950904458 0.6160659899 0.7303316540 - 130 0.1032840121 0.4490544452 0.8875147988 - 131 0.0037180419 0.6830826094 0.7303316540 - 132 0.6771015368 -0.0902728309 0.7303316540 - 133 0.9344721764 0.2326010154 0.2695524424 - 134 0.9960073301 0.0879636556 0.0152247717 - 135 0.8156182708 0.5119651523 -0.2695524424 - 136 0.7540831171 0.6566025121 -0.0152247717 - 137 0.8574905961 -0.5142743275 -0.0152247717 - 138 0.8927233180 -0.3610907895 -0.2695524424 - 139 0.9607746082 -0.0652198824 0.2695524424 - 140 0.4947256115 -0.4710225519 -0.7303316540 - 141 0.6485012287 -0.2146202028 -0.7303316540 - 142 0.7389477008 -0.6174931387 -0.2695524424 - 143 0.3475061014 0.3025838584 -0.8875147988 - 144 0.6008476888 0.3249580433 -0.7303316540 - 145 0.3951596413 -0.2369943877 -0.8875147988 - 146 0.6192844249 0.7374471383 -0.2695524424 - 147 0.4045138430 0.5504400294 -0.7303316540 - 148 0.2241247837 0.9744415260 0.0152247717 - 149 0.3589234758 0.8935968998 0.2695524424 - 150 -0.1803890593 0.4240014966 -0.8875147988 - 151 -0.1233813177 0.6718576676 -0.7303316540 - 152 -0.3984979249 0.5548108497 -0.7303316540 - 153 -0.5099844947 0.8168581860 -0.2695524424 - 154 -0.2348678876 0.9339050038 -0.2695524424 - 155 -0.3914415994 0.9200769971 -0.0152247717 - 156 0.0675510718 0.9606135193 0.2695524424 - 157 -0.2950904458 -0.6160659899 -0.7303316540 - 158 -0.0037180419 -0.6830826094 -0.7303316540 - 159 -0.1032840121 -0.4490544452 -0.8875147988 - 160 -0.4589926712 -0.0405365222 -0.8875147988 - 161 -0.6771015368 0.0902728309 -0.7303316540 - 162 -0.6507991050 -0.2075480669 -0.7303316540 diff --git a/grid_gen/global_scvt/convergence b/grid_gen/global_scvt/convergence deleted file mode 100644 index dbb9c0147..000000000 --- a/grid_gen/global_scvt/convergence +++ /dev/null @@ -1 +0,0 @@ - eps = 1.0e-10 diff --git a/grid_gen/global_scvt/dx/README b/grid_gen/global_scvt/dx/README deleted file mode 100644 index 27a14cd79..000000000 --- a/grid_gen/global_scvt/dx/README +++ /dev/null @@ -1,17 +0,0 @@ -NOTES: -Date files in the directory are automatically generated by running "grid_gen" -The mesh visualized here is consistent with the "locs.dat.out" file generated by grid_gen - -To Run OpenDX: -1. Install OpenDX -2. From this directory, type "dx" -3. Select edit visual program -4. Open mesh.net -5. Under Execute, select Execute on Change -6. Under Window, Open Control Panel by Name, select Main Panel -7. 
To save an image, from the image panel select Save Image under File - -NOTE: -You can keep OpenDX running and load new mesh generated by grid_gen by: -1. Select reset server under connect -2. Select execute on change diff --git a/grid_gen/global_scvt/dx/example.tiff b/grid_gen/global_scvt/dx/example.tiff deleted file mode 100644 index 5447b4d41..000000000 Binary files a/grid_gen/global_scvt/dx/example.tiff and /dev/null differ diff --git a/grid_gen/global_scvt/dx/mesh.cfg b/grid_gen/global_scvt/dx/mesh.cfg deleted file mode 100644 index bdecee6a5..000000000 --- a/grid_gen/global_scvt/dx/mesh.cfg +++ /dev/null @@ -1,142 +0,0 @@ -// -// time: Fri Feb 26 16:11:30 2010 -// -// version: 3.2.0 (format), 4.4.4 (DX) -// -// inaccessible panels: 5 -// inaccessible groups: -// -// panel[0]: position = (0.0078,0.1979), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 124, height = 56 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[1]: position = (0.0078,0.1697), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 185, height = 60 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[2]: position = (0.0836,0.2648), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 185, height = 60 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[3]: position = (0.4891,0.1465), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 126, height = 59 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[4]: position = (0.0258,0.1620), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 137, height = 59 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[5]: position = (0.0531,0.0874), size = 0.1586x0.3997, startup = 1, devstyle = 1, screen = 0 -// title: value = Main Panel -// -// inaccessible panels: 5 -// inaccessible groups: -// workspace: width = 143, height = 203 -// layout: snap = 0, width = 50, height = 50, align = NC -// -// panel[6]: position = (0.0078,0.1131), size = 0.2930x0.5784, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 131, height = 56 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// interactor Integer[5]: num_components = 1, value = 1 -// component[0]: minimum = 0, maximum = 1, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 3, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// label: value = Show Mesh -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 5, x = 15, y = 3, style = Stepper, vertical = 1, size = 121x54 -// label: value = Show Mesh -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[1]: num_components = 1, value = "voronoi.dx" -// instance: panel = 1, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// -// interactor String[1]: num_components = 1, value = "area" -// instance: panel = 0, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// -// interactor Integer[1]: num_components = 1, value = 1 -// component[0]: minimum = 0, maximum = 1, global increment = 1, decimal = 0, global 
continuous = 0 -// instance: panel = 4, x = 5, y = 5, style = Stepper, vertical = 1, size = 132x54 -// label: value = Show Scalar Data -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 5, x = 9, y = 78, style = Stepper, vertical = 1, size = 131x54 -// label: value = Show Scalar Data -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// node Colormap[1]: -// input[1]: defaulting = 0, value = { [0.0 0.74683544] [1.0 0.0] } -// input[2]: defaulting = 0, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } -// input[3]: defaulting = 0, value = { [0.84699454 1.0] } -// input[4]: defaulting = 0, value = { [0.84972678 1.0] } -// input[5]: defaulting = 0, value = "Colormap_1" -// input[7]: defaulting = 1, value = 0.067314833 -// input[8]: defaulting = 1, value = 0.080266804 -// input[9]: defaulting = 1, value = 20 -// input[12]: defaulting = 0, value = { 0.067314833 0.080266804 } -// input[17]: defaulting = 0, value = 0.067314833 -// input[18]: defaulting = 0, value = 0.080266804 -// window: position = (0.0602,0.4692), size = 0.4297x0.4280, screen = 0 -// -// interactor Integer[7]: num_components = 1, value = 0 -// component[0]: minimum = 0, maximum = 1, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 5, x = 7, y = 149, style = Stepper, vertical = 1, size = 136x54 -// label: value = Show Topography -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[4]: num_components = 1, value = "topography.dx" -// instance: panel = 2, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// -// interactor String[4]: num_components = 1, value = "tpg" -// -// interactor String[7]: num_components = 1, value = "white" -// instance: panel = 6, x = 5, y = 5, style = Text, vertical = 1, size = 126x51 -// -// node Image[2]: -// depth: value = 24 -// window: position = (0.2234,0.0656), size = 0.5680x0.8985, screen = 0 -// input[1]: defaulting = 0, value = "Image_2" -// input[4]: defaulting = 0, value = 1 -// input[5]: defaulting = 0, value = [0.015844 -0.0369692 0] -// input[6]: defaulting = 0, value = [0.456141 -6.14723 3.44158] -// input[7]: defaulting = 0, value = 2.49278 -// input[8]: defaulting = 0, value = 713 -// input[9]: defaulting = 0, value = 0.922 -// input[10]: defaulting = 0, value = [0.0452349 0.492725 0.869008] -// input[11]: defaulting = 1, value = 20.1171 -// input[12]: defaulting = 0, value = 0 -// input[14]: defaulting = 0, value = 1 -// input[15]: defaulting = 1, value = "none" -// input[16]: defaulting = 1, value = "none" -// input[17]: defaulting = 1, value = 1 -// input[18]: defaulting = 1, value = 1 -// input[19]: defaulting = 0, value = 0 -// input[22]: defaulting = 0, value = "black" -// input[25]: defaulting = 0, value = "./example.tiff" -// input[26]: defaulting = 0, value = "tiff gamma=1" -// input[27]: defaulting = 0, value = 500 -// input[28]: defaulting = 1, value = 1.0 -// input[29]: defaulting = 0, value = 0 -// input[30]: defaulting = 0, value = {"x axis", "y axis", ""} -// input[31]: defaulting = 0, value = { -15 -15 15 } -// input[34]: defaulting = 0, value = 1 -// input[37]: defaulting = 0, value = {"grey30", "grey5", "yellow", "white"} -// input[38]: defaulting = 0, value = {"background", "grid", "ticks", "labels"} -// input[39]: defaulting = 0, value = 0.5 -// input[41]: defaulting = 0, value = "none" -// internal caching: 1 diff --git 
a/grid_gen/global_scvt/dx/mesh.net b/grid_gen/global_scvt/dx/mesh.net deleted file mode 100644 index 98eee9b80..000000000 --- a/grid_gen/global_scvt/dx/mesh.net +++ /dev/null @@ -1,852 +0,0 @@ -// -// time: Fri Feb 26 16:11:30 2010 -// -// version: 3.2.0 (format), 4.4.4 (DX) -// -// -// MODULE main -// workspace: width = 779, height = 713 -// layout: snap = 0, width = 50, height = 50, align = NN -// -macro main( -) -> ( -) { - // - // node Integer[5]: x = 61, y = 420, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_5" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = 0 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node FileSelector[1]: x = 66, y = 17, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "voronoi.dx" - // output[2]: visible = 1, type = 32, value = "voronoi.dx" - // - // - // node String[1]: x = 187, y = 20, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "area" - // - // - // node Import[1]: x = 120, y = 93, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_1_out_1 = - Import( - main_FileSelector_1_out_1, - main_String_1_out_1, - main_Import_1_in_3, - main_Import_1_in_4, - main_Import_1_in_5, - main_Import_1_in_6 - ) [instance: 1, cache: 1]; - // - // node ShowConnections[2]: x = 65, y = 261, inputs = 1, label = ShowConnections - // -main_ShowConnections_2_out_1 = - ShowConnections( - main_Import_1_out_1 - ) [instance: 2, cache: 1]; - // - // node Color[3]: x = 102, y = 345, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "yellow" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_3_out_1 = - Color( - main_ShowConnections_2_out_1, - main_Color_3_in_2, - main_Color_3_in_3, - main_Color_3_in_4, - main_Color_3_in_5 - ) [instance: 3, cache: 1]; - // - // node Switch[7]: x = 138, y = 463, inputs = 2, label = Switch - // -main_Switch_7_out_1 = - Switch( - main_Integer_5_out_1, - main_Color_3_out_1 - ) [instance: 7, cache: 1]; - // - // node Integer[1]: x = 220, y = 439, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_1" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = 0 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node Colormap[1]: x = 254, y = 167, inputs = 19, label = Colormap - // input[1]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 0.74683544] [1.0 0.0] } - // input[2]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } - // input[3]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84699454 1.0] } - // input[4]: defaulting = 0, visible = 0, type = 
16777224, value = { [0.84972678 1.0] } - // input[5]: defaulting = 0, visible = 0, type = 32, value = "Colormap_1" - // input[7]: defaulting = 1, visible = 0, type = 5, value = 0.067314833 - // input[8]: defaulting = 1, visible = 0, type = 5, value = 0.080266804 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 20 - // input[12]: defaulting = 0, visible = 0, type = 16777221, value = { 0.067314833 0.080266804 } - // input[17]: defaulting = 0, visible = 0, type = 5, value = 0.067314833 - // input[18]: defaulting = 0, visible = 0, type = 5, value = 0.080266804 - // window: position = (0.0602,0.4692), size = 0.4297x0.4280, screen = 0 - // -main_Colormap_1_out_1[cache: 2], -main_Colormap_1_out_2[cache: 2] = - Colormap( - main_Colormap_1_in_1, - main_Colormap_1_in_2, - main_Colormap_1_in_3, - main_Colormap_1_in_4, - main_Colormap_1_in_5, - main_Import_1_out_1, - main_Colormap_1_in_7, - main_Colormap_1_in_8, - main_Colormap_1_in_9, - main_Colormap_1_in_10, - main_Colormap_1_in_11, - main_Colormap_1_in_12, - main_Colormap_1_in_13, - main_Colormap_1_in_14, - main_Colormap_1_in_15, - main_Colormap_1_in_16, - main_Colormap_1_in_17, - main_Colormap_1_in_18, - main_Colormap_1_in_19 - ) [instance: 1, cache: 1]; - // - // node Color[5]: x = 275, y = 280, inputs = 5, label = Color - // input[2]: defaulting = 1, visible = 1, type = 32, value = "black" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_5_out_1 = - Color( - main_Import_1_out_1, - main_Colormap_1_out_1, - main_Color_5_in_3, - main_Color_5_in_4, - main_Color_5_in_5 - ) [instance: 5, cache: 1]; - // - // node Collect[3]: x = 245, y = 353, inputs = 2, label = Collect - // -main_Collect_3_out_1 = - Collect( - main_Collect_3_in_1, - main_Color_5_out_1 - ) [instance: 3, cache: 1]; - // - // node Switch[1]: x = 309, y = 436, inputs = 2, label = Switch - // -main_Switch_1_out_1 = - Switch( - main_Integer_1_out_1, - main_Collect_3_out_1 - ) [instance: 1, cache: 1]; - // - // node Integer[7]: x = 554, y = 437, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_7" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[5]: defaulting = 1, visible = 0, type = 1, value = 0 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 0 - // - // - // node FileSelector[4]: x = 544, y = 107, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "topography.dx" - // output[2]: visible = 1, type = 32, value = "topography.dx" - // - // - // node String[4]: x = 663, y = 110, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "tpg" - // - // - // node Import[4]: x = 597, y = 183, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_4_out_1 = - Import( - main_FileSelector_4_out_1, - main_String_4_out_1, - main_Import_4_in_3, - main_Import_4_in_4, - main_Import_4_in_5, - main_Import_4_in_6 - ) [instance: 4, cache: 1]; - // - // node ShowConnections[3]: x = 580, y = 279, inputs = 1, label = ShowConnections - // -main_ShowConnections_3_out_1 = - ShowConnections( - main_Import_4_out_1 - ) 
[instance: 3, cache: 1]; - // - // node Color[8]: x = 617, y = 363, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "black" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_8_out_1 = - Color( - main_ShowConnections_3_out_1, - main_Color_8_in_2, - main_Color_8_in_3, - main_Color_8_in_4, - main_Color_8_in_5 - ) [instance: 8, cache: 1]; - // - // node Tube[3]: x = 731, y = 419, inputs = 4, label = Tube - // input[2]: defaulting = 0, visible = 1, type = 5, value = 0.01 - // -main_Tube_3_out_1 = - Tube( - main_Color_8_out_1, - main_Tube_3_in_2, - main_Tube_3_in_3, - main_Tube_3_in_4 - ) [instance: 3, cache: 1]; - // - // node Switch[9]: x = 653, y = 481, inputs = 2, label = Switch - // -main_Switch_9_out_1 = - Switch( - main_Integer_7_out_1, - main_Tube_3_out_1 - ) [instance: 9, cache: 1]; - // - // node Collect[2]: x = 406, y = 576, inputs = 7, label = Collect - // -main_Collect_2_out_1 = - Collect( - main_Switch_7_out_1, - main_Switch_1_out_1, - main_Switch_9_out_1, - main_Collect_2_in_4, - main_Collect_2_in_5, - main_Collect_2_in_6, - main_Collect_2_in_7 - ) [instance: 2, cache: 1]; - // - // node String[7]: x = 13, y = 114, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "white" - // - // - // node ColorBar[1]: x = 65, y = 176, inputs = 16, label = ColorBar - // input[2]: defaulting = 0, visible = 1, type = 8, value = [0.05 0.15] - // input[3]: defaulting = 0, visible = 1, type = 8, value = [200 15] - // input[4]: defaulting = 0, visible = 1, type = 3, value = 0 - // input[9]: defaulting = 1, visible = 1, type = 16777248, value = {"white"} - // -main_ColorBar_1_out_1 = - ColorBar( - main_Colormap_1_out_1, - main_ColorBar_1_in_2, - main_ColorBar_1_in_3, - main_ColorBar_1_in_4, - main_ColorBar_1_in_5, - main_ColorBar_1_in_6, - main_ColorBar_1_in_7, - main_ColorBar_1_in_8, - main_String_7_out_1, - main_ColorBar_1_in_10, - main_ColorBar_1_in_11, - main_ColorBar_1_in_12, - main_ColorBar_1_in_13, - main_ColorBar_1_in_14, - main_ColorBar_1_in_15, - main_ColorBar_1_in_16 - ) [instance: 1, cache: 1]; - // - // node Image[2]: x = 453, y = 651, inputs = 49, label = Image - // input[1]: defaulting = 0, visible = 0, type = 67108863, value = "Image_2" - // input[4]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 0, visible = 0, type = 8, value = [0.015844 -0.0369692 0] - // input[6]: defaulting = 0, visible = 0, type = 8, value = [0.456141 -6.14723 3.44158] - // input[7]: defaulting = 0, visible = 0, type = 5, value = 2.49278 - // input[8]: defaulting = 0, visible = 0, type = 1, value = 713 - // input[9]: defaulting = 0, visible = 0, type = 5, value = 0.922 - // input[10]: defaulting = 0, visible = 0, type = 8, value = [0.0452349 0.492725 0.869008] - // input[11]: defaulting = 1, visible = 0, type = 5, value = 20.1171 - // input[12]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[14]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[15]: defaulting = 1, visible = 0, type = 32, value = "none" - // input[16]: defaulting = 1, visible = 0, type = 32, value = "none" - // input[17]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[18]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[19]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[22]: defaulting = 0, visible = 0, type = 32, value = "black" - // input[25]: defaulting = 0, visible = 0, type = 32, value = "./example.tiff" - // input[26]: defaulting = 0, visible = 0, type 
= 32, value = "tiff gamma=1" - // input[27]: defaulting = 0, visible = 0, type = 1, value = 500 - // input[28]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[29]: defaulting = 0, visible = 0, type = 3, value = 0 - // input[30]: defaulting = 0, visible = 0, type = 16777248, value = {"x axis", "y axis", ""} - // input[31]: defaulting = 0, visible = 0, type = 16777217, value = { -15 -15 15 } - // input[34]: defaulting = 0, visible = 0, type = 3, value = 1 - // input[37]: defaulting = 0, visible = 0, type = 16777248, value = {"grey30", "grey5", "yellow", "white"} - // input[38]: defaulting = 0, visible = 0, type = 16777248, value = {"background", "grid", "ticks", "labels"} - // input[39]: defaulting = 0, visible = 0, type = 5, value = 0.5 - // input[41]: defaulting = 0, visible = 0, type = 32, value = "none" - // depth: value = 24 - // window: position = (0.2234,0.0656), size = 0.5680x0.8985, screen = 0 - // internal caching: 1 - // -main_Image_2_out_1, -main_Image_2_out_2, -main_Image_2_out_3 = - Image( - main_Image_2_in_1, - main_Collect_2_out_1, - main_Image_2_in_3, - main_Image_2_in_4, - main_Image_2_in_5, - main_Image_2_in_6, - main_Image_2_in_7, - main_Image_2_in_8, - main_Image_2_in_9, - main_Image_2_in_10, - main_Image_2_in_11, - main_Image_2_in_12, - main_Image_2_in_13, - main_Image_2_in_14, - main_Image_2_in_15, - main_Image_2_in_16, - main_Image_2_in_17, - main_Image_2_in_18, - main_Image_2_in_19, - main_Image_2_in_20, - main_Image_2_in_21, - main_Image_2_in_22, - main_Image_2_in_23, - main_Image_2_in_24, - main_Image_2_in_25, - main_Image_2_in_26, - main_Image_2_in_27, - main_Image_2_in_28, - main_Image_2_in_29, - main_Image_2_in_30, - main_Image_2_in_31, - main_Image_2_in_32, - main_Image_2_in_33, - main_Image_2_in_34, - main_Image_2_in_35, - main_Image_2_in_36, - main_Image_2_in_37, - main_Image_2_in_38, - main_Image_2_in_39, - main_Image_2_in_40, - main_Image_2_in_41, - main_Image_2_in_42, - main_Image_2_in_43, - main_Image_2_in_44, - main_Image_2_in_45, - main_Image_2_in_46, - main_Image_2_in_47, - main_Image_2_in_48, - main_Image_2_in_49 - ) [instance: 2, cache: 1]; - // - // node Tube[2]: x = 11, y = 345, inputs = 4, label = Tube - // input[2]: defaulting = 1, visible = 1, type = 5, value = 0.0025 - // input[3]: defaulting = 0, visible = 0, type = 1, value = 16 - // -main_Tube_2_out_1 = - Tube( - main_Tube_2_in_1, - main_Tube_2_in_2, - main_Tube_2_in_3, - main_Tube_2_in_4 - ) [instance: 2, cache: 1]; -// network: end of macro body -CacheScene(main_Image_2_in_1, main_Image_2_out_1, main_Image_2_out_2); -} -main_Integer_5_in_1 = "Integer_5"; -main_Integer_5_in_2 = NULL; -main_Integer_5_in_3 = 1 ; -main_Integer_5_in_4 = NULL; -main_Integer_5_in_5 = NULL; -main_Integer_5_in_6 = NULL; -main_Integer_5_in_7 = NULL; -main_Integer_5_in_8 = NULL; -main_Integer_5_in_9 = NULL; -main_Integer_5_in_10 = NULL; -main_Integer_5_in_11 = NULL; -main_Integer_5_out_1 = 1 ; -main_FileSelector_1_out_1 = "voronoi.dx"; -main_String_1_out_1 = "area"; -main_Import_1_in_3 = "dx"; -main_Import_1_in_4 = NULL; -main_Import_1_in_5 = NULL; -main_Import_1_in_6 = NULL; -main_Import_1_out_1 = NULL; -main_ShowConnections_2_out_1 = NULL; -main_Color_3_in_2 = "yellow"; -main_Color_3_in_3 = 1.0; -main_Color_3_in_4 = NULL; -main_Color_3_in_5 = NULL; -main_Color_3_out_1 = NULL; -main_Switch_7_out_1 = NULL; -main_Integer_1_in_1 = "Integer_1"; -main_Integer_1_in_2 = NULL; -main_Integer_1_in_3 = 1 ; -main_Integer_1_in_4 = NULL; -main_Integer_1_in_5 = NULL; -main_Integer_1_in_6 = NULL; -main_Integer_1_in_7 
= NULL; -main_Integer_1_in_8 = NULL; -main_Integer_1_in_9 = NULL; -main_Integer_1_in_10 = NULL; -main_Integer_1_in_11 = NULL; -main_Integer_1_out_1 = 1 ; -main_Colormap_1_in_1 = { [0.0 0.74683544] [1.0 0.0] }; -main_Colormap_1_in_2 = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] }; -main_Colormap_1_in_3 = { [0.84699454 1.0] }; -main_Colormap_1_in_4 = { [0.84972678 1.0] }; -main_Colormap_1_in_5 = "Colormap_1"; -main_Colormap_1_in_7 = NULL; -main_Colormap_1_in_8 = NULL; -main_Colormap_1_in_9 = NULL; -main_Colormap_1_in_10 = NULL; -main_Colormap_1_in_11 = NULL; -main_Colormap_1_in_12 = { 0.067314833 0.080266804 }; -main_Colormap_1_in_13 = NULL; -main_Colormap_1_in_14 = NULL; -main_Colormap_1_in_15 = NULL; -main_Colormap_1_in_16 = NULL; -main_Colormap_1_in_17 = 0.067314833; -main_Colormap_1_in_18 = 0.080266804; -main_Colormap_1_in_19 = NULL; -main_Colormap_1_out_1 = NULL; -main_Color_5_in_3 = 1.0; -main_Color_5_in_4 = NULL; -main_Color_5_in_5 = NULL; -main_Color_5_out_1 = NULL; -main_Collect_3_in_1 = NULL; -main_Collect_3_out_1 = NULL; -main_Switch_1_out_1 = NULL; -main_Integer_7_in_1 = "Integer_7"; -main_Integer_7_in_2 = NULL; -main_Integer_7_in_3 = 0 ; -main_Integer_7_in_4 = NULL; -main_Integer_7_in_5 = NULL; -main_Integer_7_in_6 = NULL; -main_Integer_7_in_7 = NULL; -main_Integer_7_in_8 = NULL; -main_Integer_7_in_9 = NULL; -main_Integer_7_in_10 = NULL; -main_Integer_7_in_11 = NULL; -main_Integer_7_out_1 = 0 ; -main_FileSelector_4_out_1 = "topography.dx"; -main_String_4_out_1 = "tpg"; -main_Import_4_in_3 = "dx"; -main_Import_4_in_4 = NULL; -main_Import_4_in_5 = NULL; -main_Import_4_in_6 = NULL; -main_Import_4_out_1 = NULL; -main_ShowConnections_3_out_1 = NULL; -main_Color_8_in_2 = "black"; -main_Color_8_in_3 = 1.0; -main_Color_8_in_4 = NULL; -main_Color_8_in_5 = NULL; -main_Color_8_out_1 = NULL; -main_Tube_3_in_2 = 0.01; -main_Tube_3_in_3 = NULL; -main_Tube_3_in_4 = NULL; -main_Tube_3_out_1 = NULL; -main_Switch_9_out_1 = NULL; -main_Collect_2_in_4 = NULL; -main_Collect_2_in_5 = NULL; -main_Collect_2_in_6 = NULL; -main_Collect_2_in_7 = NULL; -main_Collect_2_out_1 = NULL; -main_String_7_out_1 = "white"; -main_ColorBar_1_in_2 = [0.05 0.15]; -main_ColorBar_1_in_3 = [200 15]; -main_ColorBar_1_in_4 = 0; -main_ColorBar_1_in_5 = NULL; -main_ColorBar_1_in_6 = NULL; -main_ColorBar_1_in_7 = NULL; -main_ColorBar_1_in_8 = NULL; -main_ColorBar_1_in_10 = NULL; -main_ColorBar_1_in_11 = NULL; -main_ColorBar_1_in_12 = NULL; -main_ColorBar_1_in_13 = NULL; -main_ColorBar_1_in_14 = NULL; -main_ColorBar_1_in_15 = NULL; -main_ColorBar_1_in_16 = NULL; -macro Image( - id, - object, - where, - useVector, - to, - from, - width, - resolution, - aspect, - up, - viewAngle, - perspective, - options, - buttonState = 1, - buttonUpApprox = "none", - buttonDownApprox = "none", - buttonUpDensity = 1, - buttonDownDensity = 1, - renderMode = 0, - defaultCamera, - reset, - backgroundColor, - throttle, - RECenable = 0, - RECfile, - RECformat, - RECresolution, - RECaspect, - AAenable = 0, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - interactionMode, - title, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels, - webOptions) -> ( - object, - camera, - where) -{ - ImageMessage( - id, - backgroundColor, - throttle, - RECenable, - RECfile, - RECformat, - RECresolution, - RECaspect, - AAenable, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - 
AAannotation, - AAlabelscale, - AAfont, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels, - interactionMode, - title, - renderMode, - buttonUpApprox, - buttonDownApprox, - buttonUpDensity, - buttonDownDensity) [instance: 1, cache: 1]; - autoCamera = - AutoCamera( - object, - "front", - object, - resolution, - aspect, - [0,1,0], - perspective, - viewAngle, - backgroundColor) [instance: 1, cache: 1]; - realCamera = - Camera( - to, - from, - width, - resolution, - aspect, - up, - perspective, - viewAngle, - backgroundColor) [instance: 1, cache: 1]; - coloredDefaultCamera = - UpdateCamera(defaultCamera, - background=backgroundColor) [instance: 1, cache: 1]; - nullDefaultCamera = - Inquire(defaultCamera, - "is null + 1") [instance: 1, cache: 1]; - resetCamera = - Switch( - nullDefaultCamera, - coloredDefaultCamera, - autoCamera) [instance: 1, cache: 1]; - resetNull = - Inquire( - reset, - "is null + 1") [instance: 2, cache: 1]; - reset = - Switch( - resetNull, - reset, - 0) [instance: 2, cache: 1]; - whichCamera = - Compute( - "($0 != 0 || $1 == 0) ? 1 : 2", - reset, - useVector) [instance: 1, cache: 1]; - camera = Switch( - whichCamera, - resetCamera, - realCamera) [instance: 3, cache: 1]; - AAobject = - AutoAxes( - object, - camera, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels) [instance: 1, cache: 1]; - switchAAenable = Compute("$0+1", - AAenable) [instance: 2, cache: 1]; - object = Switch( - switchAAenable, - object, - AAobject) [instance:4, cache: 1]; - SWapproximation_options = - Switch( - buttonState, - buttonUpApprox, - buttonDownApprox) [instance: 5, cache: 1]; - SWdensity_options = - Switch( - buttonState, - buttonUpDensity, - buttonDownDensity) [instance: 6, cache: 1]; - HWapproximation_options = - Format( - "%s,%s", - buttonDownApprox, - buttonUpApprox) [instance: 1, cache: 1]; - HWdensity_options = - Format( - "%d,%d", - buttonDownDensity, - buttonUpDensity) [instance: 2, cache: 1]; - switchRenderMode = Compute( - "$0+1", - renderMode) [instance: 3, cache: 1]; - approximation_options = Switch( - switchRenderMode, - SWapproximation_options, - HWapproximation_options) [instance: 7, cache: 1]; - density_options = Switch( - switchRenderMode, - SWdensity_options, - HWdensity_options) [instance: 8, cache: 1]; - renderModeString = Switch( - switchRenderMode, - "software", - "hardware")[instance: 9, cache: 1]; - object_tag = Inquire( - object, - "object tag")[instance: 3, cache: 1]; - annoted_object = - Options( - object, - "send boxes", - 0, - "cache", - 1, - "object tag", - object_tag, - "ddcamera", - whichCamera, - "rendering approximation", - approximation_options, - "render every", - density_options, - "button state", - buttonState, - "rendering mode", - renderModeString) [instance: 1, cache: 1]; - RECresNull = - Inquire( - RECresolution, - "is null + 1") [instance: 4, cache: 1]; - ImageResolution = - Inquire( - camera, - "camera resolution") [instance: 5, cache: 1]; - RECresolution = - Switch( - RECresNull, - RECresolution, - ImageResolution) [instance: 10, cache: 1]; - RECaspectNull = - Inquire( - RECaspect, - "is null + 1") [instance: 6, cache: 1]; - ImageAspect = - Inquire( - camera, - "camera aspect") [instance: 7, cache: 1]; - RECaspect = - Switch( - RECaspectNull, - RECaspect, - ImageAspect) [instance: 11, cache: 1]; - switchRECenable = Compute( - "$0 
== 0 ? 1 : (($2 == $3) && ($4 == $5)) ? ($1 == 1 ? 2 : 3) : 4", - RECenable, - switchRenderMode, - RECresolution, - ImageResolution, - RECaspect, - ImageAspect) [instance: 4, cache: 1]; - NoRECobject, RECNoRerenderObject, RECNoRerHW, RECRerenderObject = Route(switchRECenable, annoted_object); - Display( - NoRECobject, - camera, - where, - throttle) [instance: 1, cache: 1]; - image = - Render( - RECNoRerenderObject, - camera) [instance: 1, cache: 1]; - Display( - image, - NULL, - where, - throttle) [instance: 2, cache: 1]; - WriteImage( - image, - RECfile, - RECformat) [instance: 1, cache: 1]; - rec_where = Display( - RECNoRerHW, - camera, - where, - throttle) [instance: 1, cache: 0]; - rec_image = ReadImageWindow( - rec_where) [instance: 1, cache: 1]; - WriteImage( - rec_image, - RECfile, - RECformat) [instance: 1, cache: 1]; - RECupdateCamera = - UpdateCamera( - camera, - resolution=RECresolution, - aspect=RECaspect) [instance: 2, cache: 1]; - Display( - RECRerenderObject, - camera, - where, - throttle) [instance: 1, cache: 1]; - RECRerenderObject = - ScaleScreen( - RECRerenderObject, - NULL, - RECresolution, - camera) [instance: 1, cache: 1]; - image = - Render( - RECRerenderObject, - RECupdateCamera) [instance: 2, cache: 1]; - WriteImage( - image, - RECfile, - RECformat) [instance: 2, cache: 1]; -} -main_Image_2_in_1 = "Image_2"; -main_Image_2_in_3 = "X24,,"; -main_Image_2_in_4 = 1; -main_Image_2_in_5 = [0.015844 -0.0369692 0]; -main_Image_2_in_6 = [0.456141 -6.14723 3.44158]; -main_Image_2_in_7 = 2.49278; -main_Image_2_in_8 = 713; -main_Image_2_in_9 = 0.922; -main_Image_2_in_10 = [0.0452349 0.492725 0.869008]; -main_Image_2_in_11 = NULL; -main_Image_2_in_12 = 0; -main_Image_2_in_13 = NULL; -main_Image_2_in_14 = 1; -main_Image_2_in_15 = NULL; -main_Image_2_in_16 = NULL; -main_Image_2_in_17 = NULL; -main_Image_2_in_18 = NULL; -main_Image_2_in_19 = 0; -main_Image_2_in_20 = NULL; -main_Image_2_in_21 = NULL; -main_Image_2_in_22 = "black"; -main_Image_2_in_23 = NULL; -main_Image_2_in_25 = "./example.tiff"; -main_Image_2_in_26 = "tiff gamma=1"; -main_Image_2_in_27 = 500; -main_Image_2_in_28 = NULL; -main_Image_2_in_29 = 0; -main_Image_2_in_30 = {"x axis", "y axis", ""}; -main_Image_2_in_31 = { -15 -15 15 }; -main_Image_2_in_32 = NULL; -main_Image_2_in_33 = NULL; -main_Image_2_in_34 = 1; -main_Image_2_in_35 = NULL; -main_Image_2_in_36 = NULL; -main_Image_2_in_37 = {"grey30", "grey5", "yellow", "white"}; -main_Image_2_in_38 = {"background", "grid", "ticks", "labels"}; -main_Image_2_in_39 = 0.5; -main_Image_2_in_40 = NULL; -main_Image_2_in_41 = "none"; -main_Image_2_in_42 = NULL; -main_Image_2_in_43 = NULL; -main_Image_2_in_44 = NULL; -main_Image_2_in_45 = NULL; -main_Image_2_in_46 = NULL; -main_Image_2_in_47 = NULL; -main_Image_2_in_48 = NULL; -main_Image_2_in_49 = NULL; -main_Tube_2_in_1 = NULL; -main_Tube_2_in_2 = NULL; -main_Tube_2_in_3 = 16; -main_Tube_2_in_4 = NULL; -Executive("product version 4 4 4"); -$sync -main(); diff --git a/grid_gen/global_scvt/dx/topography.dx b/grid_gen/global_scvt/dx/topography.dx deleted file mode 100644 index 6e0e1d5a2..000000000 --- a/grid_gen/global_scvt/dx/topography.dx +++ /dev/null @@ -1,235 +0,0 @@ -object "positions list" class array type float rank 1 shape 3 items 101 -data follows - - 0.2991601112E+00 -0.8219356504E+00 0.5049999952E+00 - 0.2947937299E+00 -0.8116439251E+00 0.5238614095E+00 - 0.2892384301E+00 -0.8013977596E+00 0.5424045927E+00 - 0.2825631166E+00 -0.7912255171E+00 0.5605535183E+00 - 0.2748426807E+00 -0.7811551564E+00 0.5782370649E+00 - 
0.2661568154E+00 -0.7712145858E+00 0.5953894465E+00 - 0.2565887515E+00 -0.7614320220E+00 0.6119505447E+00 - 0.2462239371E+00 -0.7518363312E+00 0.6278661359E+00 - 0.2351486896E+00 -0.7424573287E+00 0.6430880108E+00 - 0.2234488481E+00 -0.7333260158E+00 0.6575739864E+00 - 0.2112084569E+00 -0.7244747360E+00 0.6712878111E+00 - 0.1985085106E+00 -0.7159372332E+00 0.6841989696E+00 - 0.1854257918E+00 -0.7077486012E+00 0.6962823953E+00 - 0.1720318285E+00 -0.6999451191E+00 0.7075180994E+00 - 0.1583920008E+00 -0.6925639711E+00 0.7178907304E+00 - 0.1445648184E+00 -0.6856428558E+00 0.7273890763E+00 - 0.1306013902E+00 -0.6792194960E+00 0.7360055257E+00 - 0.1165451010E+00 -0.6733310644E+00 0.7437355026E+00 - 0.1024315050E+00 -0.6680135450E+00 0.7505768923E+00 - 0.8828844081E-01 -0.6633010548E+00 0.7565294724E+00 - 0.7413636571E-01 -0.6592251512E+00 0.7615943671E+00 - 0.5998890034E-01 -0.6558141530E+00 0.7657735366E+00 - 0.4585357015E-01 -0.6530925023E+00 0.7690693169E+00 - 0.3173272249E-01 -0.6510801937E+00 0.7714840222E+00 - 0.1762459364E-01 -0.6497922942E+00 0.7730196174E+00 - 0.3524495337E-02 -0.6492385744E+00 0.7736774730E+00 - -0.1057391358E-01 -0.6494232656E+00 0.7734582052E+00 - -0.2467730726E-01 -0.6503449545E+00 0.7723616066E+00 - -0.3879136329E-01 -0.6519966182E+00 0.7703866697E+00 - -0.5291946212E-01 -0.6543657992E+00 0.7675317010E+00 - -0.6706145356E-01 -0.6574349134E+00 0.7637945253E+00 - -0.8121252588E-01 -0.6611816761E+00 0.7591727719E+00 - -0.9536221021E-01 -0.6655796308E+00 0.7536642387E+00 - -0.1094935484E+00 -0.6705987565E+00 0.7472673226E+00 - -0.1235824478E+00 -0.6762061295E+00 0.7399815063E+00 - -0.1375972401E+00 -0.6823666131E+00 0.7318078885E+00 - -0.1514984563E+00 -0.6890435459E+00 0.7227497426E+00 - -0.1652388226E+00 -0.6961994040E+00 0.7128130906E+00 - -0.1787634760E+00 -0.7037964096E+00 0.7020072751E+00 - -0.1920103928E+00 -0.7117970660E+00 0.6903455125E+00 - -0.2049110176E+00 -0.7201645994E+00 0.6778454140E+00 - -0.2173910734E+00 -0.7288632955E+00 0.6645294563E+00 - -0.2293715332E+00 -0.7378587222E+00 0.6504253890E+00 - -0.2407697257E+00 -0.7471178364E+00 0.6355665649E+00 - -0.2515005478E+00 -0.7566089794E+00 0.6199921834E+00 - -0.2614777540E+00 -0.7663017679E+00 0.6037474351E+00 - -0.2706152924E+00 -0.7761668952E+00 0.5868835447E+00 - -0.2788286562E+00 -0.7861758603E+00 0.5694577050E+00 - -0.2860362213E+00 -0.7963006451E+00 0.5515329033E+00 - -0.2921605418E+00 -0.8065133612E+00 0.5331776409E+00 - -0.2971295777E+00 -0.8167858906E+00 0.5144655516E+00 - -0.3008778301E+00 -0.8270895422E+00 0.4954749249E+00 - -0.3033473643E+00 -0.8373947450E+00 0.4762881464E+00 - -0.3044887036E+00 -0.8476707950E+00 0.4569910649E+00 - -0.3042615800E+00 -0.8578856732E+00 0.4376723014E+00 - -0.3026355325E+00 -0.8680059411E+00 0.4184225154E+00 - -0.2995903456E+00 -0.8779967242E+00 0.3993336423E+00 - -0.2951163286E+00 -0.8878217810E+00 0.3804981190E+00 - -0.2892144352E+00 -0.8974436568E+00 0.3620081133E+00 - -0.2818962295E+00 -0.9068239149E+00 0.3439547701E+00 - -0.2731837090E+00 -0.9159234337E+00 0.3264274889E+00 - -0.2631089938E+00 -0.9247027579E+00 0.3095132432E+00 - -0.2517139001E+00 -0.9331224883E+00 0.2932959513E+00 - -0.2390494126E+00 -0.9411436951E+00 0.2778559055E+00 - -0.2251750759E+00 -0.9487283386E+00 0.2632692642E+00 - -0.2101583237E+00 -0.9558396835E+00 0.2496076103E+00 - -0.1940737662E+00 -0.9624426944E+00 0.2369375742E+00 - -0.1770024544E+00 -0.9685044010E+00 0.2253205211E+00 - -0.1590311390E+00 -0.9739942270E+00 0.2148122980E+00 - -0.1402515399E+00 -0.9788842768E+00 0.2054630353E+00 - 
-0.1207596391E+00 -0.9831495783E+00 0.1973169962E+00 - -0.1006550063E+00 -0.9867682820E+00 0.1904124681E+00 - -0.8004016474E-01 -0.9897218201E+00 0.1847816862E+00 - -0.5901999861E-01 -0.9919950276E+00 0.1804507847E+00 - -0.3770120119E-01 -0.9935762323E+00 0.1774397668E+00 - -0.1619176011E-01 -0.9944573171E+00 0.1757624877E+00 - 0.5399528292E-02 -0.9946337604E+00 0.1754266469E+00 - 0.2696352476E-01 -0.9941046569E+00 0.1764337843E+00 - 0.4839120411E-01 -0.9928727215E+00 0.1787792801E+00 - 0.6957411512E-01 -0.9909442774E+00 0.1824523562E+00 - 0.9040485500E-01 -0.9883292256E+00 0.1874360820E+00 - 0.1107775685E+00 -0.9850409939E+00 0.1937073861E+00 - 0.1305884806E+00 -0.9810964619E+00 0.2012370788E+00 - 0.1497364679E+00 -0.9765158557E+00 0.2099898912E+00 - 0.1681236727E+00 -0.9713226084E+00 0.2199245367E+00 - 0.1856561576E+00 -0.9655431814E+00 0.2309938024E+00 - 0.2022445971E+00 -0.9592068438E+00 0.2431446780E+00 - 0.2178049982E+00 -0.9523454074E+00 0.2563185294E+00 - 0.2322594375E+00 -0.9449929186E+00 0.2704513234E+00 - 0.2455368021E+00 -0.9371853104E+00 0.2854739104E+00 - 0.2575735158E+00 -0.9289600213E+00 0.3013123685E+00 - 0.2683142337E+00 -0.9203555886E+00 0.3178884131E+00 - 0.2777124839E+00 -0.9114112293E+00 0.3351198714E+00 - 0.2857312381E+00 -0.9021664210E+00 0.3529212223E+00 - 0.2923433910E+00 -0.8926604990E+00 0.3712041973E+00 - 0.2975321311E+00 -0.8829322832E+00 0.3898784361E+00 - 0.3012911860E+00 -0.8730197527E+00 0.4088521901E+00 - 0.3036249294E+00 -0.8629597798E+00 0.4280330611E+00 - 0.3045483392E+00 -0.8527879366E+00 0.4473287661E+00 - 0.3040867986E+00 -0.8425383828E+00 0.4666479113E+00 - 0.3022757370E+00 -0.8322438396E+00 0.4859007624E+00 - -object "edge list" class array type int rank 0 items 101 data follows - - 0 - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - 13 - 14 - 15 - 16 - 17 - 18 - 19 - 20 - 21 - 22 - 23 - 24 - 25 - 26 - 27 - 28 - 29 - 30 - 31 - 32 - 33 - 34 - 35 - 36 - 37 - 38 - 39 - 40 - 41 - 42 - 43 - 44 - 45 - 46 - 47 - 48 - 49 - 50 - 51 - 52 - 53 - 54 - 55 - 56 - 57 - 58 - 59 - 60 - 61 - 62 - 63 - 64 - 65 - 66 - 67 - 68 - 69 - 70 - 71 - 72 - 73 - 74 - 75 - 76 - 77 - 78 - 79 - 80 - 81 - 82 - 83 - 84 - 85 - 86 - 87 - 88 - 89 - 90 - 91 - 92 - 93 - 94 - 95 - 96 - 97 - 98 - 99 - 100 - -attribute "ref" string "positions" - -object "loops list" class array type int rank 0 items 1 data follows - - 0 - -attribute "ref" string "edges" - -object "face list" class array type int rank 0 items 1 data follows - - 0 - -attribute "ref" string "loops" - -object 0 class array type float rank 0 items 1 data follows - - 1.0 - -attribute "dep" string "faces" - -object "tpg" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 0 diff --git a/grid_gen/global_scvt/dx/vor.area.data b/grid_gen/global_scvt/dx/vor.area.data deleted file mode 100644 index 5e356160e..000000000 --- a/grid_gen/global_scvt/dx/vor.area.data +++ /dev/null @@ -1,162 +0,0 @@ - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.6733679265E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 
0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7879554004E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.7631251040E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 - 0.8026187129E-01 diff --git a/grid_gen/global_scvt/dx/vor.edge.data b/grid_gen/global_scvt/dx/vor.edge.data deleted file mode 100644 index 6019cd51f..000000000 --- a/grid_gen/global_scvt/dx/vor.edge.data +++ /dev/null @@ -1,960 +0,0 @@ - 0 - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - 13 - 14 - 15 - 16 - 17 - 18 - 19 - 20 - 21 - 22 - 23 - 24 - 25 - 26 - 27 - 28 - 29 - 30 - 31 - 32 - 33 - 34 - 35 - 36 - 37 - 38 - 39 - 40 - 41 - 42 - 43 - 44 - 45 - 46 - 47 - 48 - 49 - 50 - 51 - 52 - 53 - 54 - 55 - 56 - 57 - 58 - 59 - 60 - 61 - 62 - 63 - 64 - 65 - 66 - 67 - 68 - 69 - 70 - 71 - 72 - 73 - 74 - 75 - 76 - 77 - 78 - 79 - 80 - 81 - 82 - 83 - 84 - 85 - 86 - 87 - 88 - 89 - 90 - 91 - 92 - 93 - 94 - 95 - 96 - 97 - 98 - 99 - 100 - 101 - 102 - 103 - 104 - 105 - 106 - 107 - 108 - 109 - 110 - 111 - 112 - 113 - 114 - 115 - 116 - 117 - 118 - 119 - 120 - 121 - 122 - 123 - 124 - 125 - 126 - 127 - 128 - 129 - 130 - 131 - 132 - 133 - 134 - 135 - 136 - 137 - 138 - 139 - 140 - 141 - 142 - 143 - 144 - 145 - 146 - 147 - 148 - 149 - 
150 - 151 - 152 - 153 - 154 - 155 - 156 - 157 - 158 - 159 - 160 - 161 - 162 - 163 - 164 - 165 - 166 - 167 - 168 - 169 - 170 - 171 - 172 - 173 - 174 - 175 - 176 - 177 - 178 - 179 - 180 - 181 - 182 - 183 - 184 - 185 - 186 - 187 - 188 - 189 - 190 - 191 - 192 - 193 - 194 - 195 - 196 - 197 - 198 - 199 - 200 - 201 - 202 - 203 - 204 - 205 - 206 - 207 - 208 - 209 - 210 - 211 - 212 - 213 - 214 - 215 - 216 - 217 - 218 - 219 - 220 - 221 - 222 - 223 - 224 - 225 - 226 - 227 - 228 - 229 - 230 - 231 - 232 - 233 - 234 - 235 - 236 - 237 - 238 - 239 - 240 - 241 - 242 - 243 - 244 - 245 - 246 - 247 - 248 - 249 - 250 - 251 - 252 - 253 - 254 - 255 - 256 - 257 - 258 - 259 - 260 - 261 - 262 - 263 - 264 - 265 - 266 - 267 - 268 - 269 - 270 - 271 - 272 - 273 - 274 - 275 - 276 - 277 - 278 - 279 - 280 - 281 - 282 - 283 - 284 - 285 - 286 - 287 - 288 - 289 - 290 - 291 - 292 - 293 - 294 - 295 - 296 - 297 - 298 - 299 - 300 - 301 - 302 - 303 - 304 - 305 - 306 - 307 - 308 - 309 - 310 - 311 - 312 - 313 - 314 - 315 - 316 - 317 - 318 - 319 - 320 - 321 - 322 - 323 - 324 - 325 - 326 - 327 - 328 - 329 - 330 - 331 - 332 - 333 - 334 - 335 - 336 - 337 - 338 - 339 - 340 - 341 - 342 - 343 - 344 - 345 - 346 - 347 - 348 - 349 - 350 - 351 - 352 - 353 - 354 - 355 - 356 - 357 - 358 - 359 - 360 - 361 - 362 - 363 - 364 - 365 - 366 - 367 - 368 - 369 - 370 - 371 - 372 - 373 - 374 - 375 - 376 - 377 - 378 - 379 - 380 - 381 - 382 - 383 - 384 - 385 - 386 - 387 - 388 - 389 - 390 - 391 - 392 - 393 - 394 - 395 - 396 - 397 - 398 - 399 - 400 - 401 - 402 - 403 - 404 - 405 - 406 - 407 - 408 - 409 - 410 - 411 - 412 - 413 - 414 - 415 - 416 - 417 - 418 - 419 - 420 - 421 - 422 - 423 - 424 - 425 - 426 - 427 - 428 - 429 - 430 - 431 - 432 - 433 - 434 - 435 - 436 - 437 - 438 - 439 - 440 - 441 - 442 - 443 - 444 - 445 - 446 - 447 - 448 - 449 - 450 - 451 - 452 - 453 - 454 - 455 - 456 - 457 - 458 - 459 - 460 - 461 - 462 - 463 - 464 - 465 - 466 - 467 - 468 - 469 - 470 - 471 - 472 - 473 - 474 - 475 - 476 - 477 - 478 - 479 - 480 - 481 - 482 - 483 - 484 - 485 - 486 - 487 - 488 - 489 - 490 - 491 - 492 - 493 - 494 - 495 - 496 - 497 - 498 - 499 - 500 - 501 - 502 - 503 - 504 - 505 - 506 - 507 - 508 - 509 - 510 - 511 - 512 - 513 - 514 - 515 - 516 - 517 - 518 - 519 - 520 - 521 - 522 - 523 - 524 - 525 - 526 - 527 - 528 - 529 - 530 - 531 - 532 - 533 - 534 - 535 - 536 - 537 - 538 - 539 - 540 - 541 - 542 - 543 - 544 - 545 - 546 - 547 - 548 - 549 - 550 - 551 - 552 - 553 - 554 - 555 - 556 - 557 - 558 - 559 - 560 - 561 - 562 - 563 - 564 - 565 - 566 - 567 - 568 - 569 - 570 - 571 - 572 - 573 - 574 - 575 - 576 - 577 - 578 - 579 - 580 - 581 - 582 - 583 - 584 - 585 - 586 - 587 - 588 - 589 - 590 - 591 - 592 - 593 - 594 - 595 - 596 - 597 - 598 - 599 - 600 - 601 - 602 - 603 - 604 - 605 - 606 - 607 - 608 - 609 - 610 - 611 - 612 - 613 - 614 - 615 - 616 - 617 - 618 - 619 - 620 - 621 - 622 - 623 - 624 - 625 - 626 - 627 - 628 - 629 - 630 - 631 - 632 - 633 - 634 - 635 - 636 - 637 - 638 - 639 - 640 - 641 - 642 - 643 - 644 - 645 - 646 - 647 - 648 - 649 - 650 - 651 - 652 - 653 - 654 - 655 - 656 - 657 - 658 - 659 - 660 - 661 - 662 - 663 - 664 - 665 - 666 - 667 - 668 - 669 - 670 - 671 - 672 - 673 - 674 - 675 - 676 - 677 - 678 - 679 - 680 - 681 - 682 - 683 - 684 - 685 - 686 - 687 - 688 - 689 - 690 - 691 - 692 - 693 - 694 - 695 - 696 - 697 - 698 - 699 - 700 - 701 - 702 - 703 - 704 - 705 - 706 - 707 - 708 - 709 - 710 - 711 - 712 - 713 - 714 - 715 - 716 - 717 - 718 - 719 - 720 - 721 - 722 - 723 - 724 - 725 - 726 - 727 - 728 - 729 - 730 - 731 - 732 - 733 - 734 - 735 - 736 - 737 - 738 - 739 - 740 - 741 - 
742 - 743 - 744 - 745 - 746 - 747 - 748 - 749 - 750 - 751 - 752 - 753 - 754 - 755 - 756 - 757 - 758 - 759 - 760 - 761 - 762 - 763 - 764 - 765 - 766 - 767 - 768 - 769 - 770 - 771 - 772 - 773 - 774 - 775 - 776 - 777 - 778 - 779 - 780 - 781 - 782 - 783 - 784 - 785 - 786 - 787 - 788 - 789 - 790 - 791 - 792 - 793 - 794 - 795 - 796 - 797 - 798 - 799 - 800 - 801 - 802 - 803 - 804 - 805 - 806 - 807 - 808 - 809 - 810 - 811 - 812 - 813 - 814 - 815 - 816 - 817 - 818 - 819 - 820 - 821 - 822 - 823 - 824 - 825 - 826 - 827 - 828 - 829 - 830 - 831 - 832 - 833 - 834 - 835 - 836 - 837 - 838 - 839 - 840 - 841 - 842 - 843 - 844 - 845 - 846 - 847 - 848 - 849 - 850 - 851 - 852 - 853 - 854 - 855 - 856 - 857 - 858 - 859 - 860 - 861 - 862 - 863 - 864 - 865 - 866 - 867 - 868 - 869 - 870 - 871 - 872 - 873 - 874 - 875 - 876 - 877 - 878 - 879 - 880 - 881 - 882 - 883 - 884 - 885 - 886 - 887 - 888 - 889 - 890 - 891 - 892 - 893 - 894 - 895 - 896 - 897 - 898 - 899 - 900 - 901 - 902 - 903 - 904 - 905 - 906 - 907 - 908 - 909 - 910 - 911 - 912 - 913 - 914 - 915 - 916 - 917 - 918 - 919 - 920 - 921 - 922 - 923 - 924 - 925 - 926 - 927 - 928 - 929 - 930 - 931 - 932 - 933 - 934 - 935 - 936 - 937 - 938 - 939 - 940 - 941 - 942 - 943 - 944 - 945 - 946 - 947 - 948 - 949 - 950 - 951 - 952 - 953 - 954 - 955 - 956 - 957 - 958 - 959 diff --git a/grid_gen/global_scvt/dx/vor.face.data b/grid_gen/global_scvt/dx/vor.face.data deleted file mode 100644 index b8954ee4d..000000000 --- a/grid_gen/global_scvt/dx/vor.face.data +++ /dev/null @@ -1,162 +0,0 @@ - 0 - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - 13 - 14 - 15 - 16 - 17 - 18 - 19 - 20 - 21 - 22 - 23 - 24 - 25 - 26 - 27 - 28 - 29 - 30 - 31 - 32 - 33 - 34 - 35 - 36 - 37 - 38 - 39 - 40 - 41 - 42 - 43 - 44 - 45 - 46 - 47 - 48 - 49 - 50 - 51 - 52 - 53 - 54 - 55 - 56 - 57 - 58 - 59 - 60 - 61 - 62 - 63 - 64 - 65 - 66 - 67 - 68 - 69 - 70 - 71 - 72 - 73 - 74 - 75 - 76 - 77 - 78 - 79 - 80 - 81 - 82 - 83 - 84 - 85 - 86 - 87 - 88 - 89 - 90 - 91 - 92 - 93 - 94 - 95 - 96 - 97 - 98 - 99 - 100 - 101 - 102 - 103 - 104 - 105 - 106 - 107 - 108 - 109 - 110 - 111 - 112 - 113 - 114 - 115 - 116 - 117 - 118 - 119 - 120 - 121 - 122 - 123 - 124 - 125 - 126 - 127 - 128 - 129 - 130 - 131 - 132 - 133 - 134 - 135 - 136 - 137 - 138 - 139 - 140 - 141 - 142 - 143 - 144 - 145 - 146 - 147 - 148 - 149 - 150 - 151 - 152 - 153 - 154 - 155 - 156 - 157 - 158 - 159 - 160 - 161 diff --git a/grid_gen/global_scvt/dx/vor.loop.data b/grid_gen/global_scvt/dx/vor.loop.data deleted file mode 100644 index 4161f5396..000000000 --- a/grid_gen/global_scvt/dx/vor.loop.data +++ /dev/null @@ -1,162 +0,0 @@ - 0 - 5 - 10 - 15 - 20 - 25 - 30 - 35 - 40 - 45 - 50 - 55 - 60 - 66 - 72 - 78 - 84 - 90 - 96 - 102 - 108 - 114 - 120 - 126 - 132 - 138 - 144 - 150 - 156 - 162 - 168 - 174 - 180 - 186 - 192 - 198 - 204 - 210 - 216 - 222 - 228 - 234 - 240 - 246 - 252 - 258 - 264 - 270 - 276 - 282 - 288 - 294 - 300 - 306 - 312 - 318 - 324 - 330 - 336 - 342 - 348 - 354 - 360 - 366 - 372 - 378 - 384 - 390 - 396 - 402 - 408 - 414 - 420 - 426 - 432 - 438 - 444 - 450 - 456 - 462 - 468 - 474 - 480 - 486 - 492 - 498 - 504 - 510 - 516 - 522 - 528 - 534 - 540 - 546 - 552 - 558 - 564 - 570 - 576 - 582 - 588 - 594 - 600 - 606 - 612 - 618 - 624 - 630 - 636 - 642 - 648 - 654 - 660 - 666 - 672 - 678 - 684 - 690 - 696 - 702 - 708 - 714 - 720 - 726 - 732 - 738 - 744 - 750 - 756 - 762 - 768 - 774 - 780 - 786 - 792 - 798 - 804 - 810 - 816 - 822 - 828 - 834 - 840 - 846 - 852 - 858 - 864 - 870 - 876 - 882 - 888 - 894 - 900 - 906 - 912 - 918 - 924 - 930 - 936 - 942 - 948 - 
954 diff --git a/grid_gen/global_scvt/dx/vor.position.data b/grid_gen/global_scvt/dx/vor.position.data deleted file mode 100644 index a627e7906..000000000 --- a/grid_gen/global_scvt/dx/vor.position.data +++ /dev/null @@ -1,960 +0,0 @@ - -0.8267643340E+00 0.2569457569E-01 0.5619613196E+00 - -0.9154295376E+00 0.7887094654E-01 0.3946747211E+00 - -0.9529269564E+00 -0.8415895755E-01 0.2912859173E+00 - -0.8874364321E+00 -0.2380933503E+00 0.3946747211E+00 - -0.8094636434E+00 -0.1702001330E+00 0.5619613196E+00 - -0.4779270666E-01 -0.9175770931E+00 0.3946747211E+00 - -0.8826807659E-01 -0.8224404063E+00 0.5619613196E+00 - -0.2799212232E+00 -0.7783595468E+00 0.5619613196E+00 - -0.3578940119E+00 -0.8462527641E+00 0.3946747211E+00 - -0.2144306989E+00 -0.9322939395E+00 0.2912859173E+00 - 0.6549052426E-01 -0.1539343928E+00 0.9859083091E+00 - 0.1666379923E+00 0.1471684647E-01 0.9859083091E+00 - 0.3749741879E-01 0.1630299041E+00 0.9859083091E+00 - -0.1434633130E+00 0.8604117543E-01 0.9859083091E+00 - -0.1261626223E+00 -0.1098535332E+00 0.9859083091E+00 - 0.8267643340E+00 -0.2569457569E-01 -0.5619613196E+00 - 0.8094636434E+00 0.1702001330E+00 -0.5619613196E+00 - 0.8874364321E+00 0.2380933503E+00 -0.3946747211E+00 - 0.9529269564E+00 0.8415895755E-01 -0.2912859173E+00 - 0.9154295376E+00 -0.7887094654E-01 -0.3946747211E+00 - 0.2799212232E+00 0.7783595468E+00 -0.5619613196E+00 - 0.8826807659E-01 0.8224404063E+00 -0.5619613196E+00 - 0.4779270666E-01 0.9175770931E+00 -0.3946747211E+00 - 0.2144306989E+00 0.9322939395E+00 -0.2912859173E+00 - 0.3578940119E+00 0.8462527641E+00 -0.3946747211E+00 - 0.3745105489E+00 -0.8802808433E+00 -0.2912859173E+00 - 0.2078725566E+00 -0.8949976898E+00 -0.3946747211E+00 - 0.2310472359E+00 -0.7942396679E+00 -0.5619613196E+00 - 0.4120079677E+00 -0.7172509392E+00 -0.5619613196E+00 - 0.5006731713E+00 -0.7704273101E+00 -0.3946747211E+00 - 0.6942388739E+00 -0.6018839178E+00 0.3946747211E+00 - 0.8204014962E+00 -0.4920303846E+00 0.2912859173E+00 - 0.8578989150E+00 -0.3290004805E+00 0.3946747211E+00 - 0.7549109720E+00 -0.3380959918E+00 0.5619613196E+00 - 0.6537635039E+00 -0.5067472311E+00 0.5619613196E+00 - -0.6942388739E+00 0.6018839178E+00 -0.3946747211E+00 - -0.6537635039E+00 0.5067472311E+00 -0.5619613196E+00 - -0.7549109720E+00 0.3380959918E+00 -0.5619613196E+00 - -0.8578989150E+00 0.3290004805E+00 -0.3946747211E+00 - -0.8204014962E+00 0.4920303846E+00 -0.2912859173E+00 - -0.3749741879E-01 -0.1630299041E+00 -0.9859083091E+00 - -0.1666379923E+00 -0.1471684647E-01 -0.9859083091E+00 - -0.6549052426E-01 0.1539343928E+00 -0.9859083091E+00 - 0.1261626223E+00 0.1098535332E+00 -0.9859083091E+00 - 0.1434633130E+00 -0.8604117543E-01 -0.9859083091E+00 - 0.5548287157E+00 0.6134855919E+00 0.5619613196E+00 - 0.6839692892E+00 0.4651725343E+00 0.5619613196E+00 - 0.7869572323E+00 0.4742680456E+00 0.3946747211E+00 - 0.7214667080E+00 0.6282024384E+00 0.2912859173E+00 - 0.5780033950E+00 0.7142436138E+00 0.3946747211E+00 - -0.6839692892E+00 -0.4651725343E+00 -0.5619613196E+00 - -0.5548287157E+00 -0.6134855919E+00 -0.5619613196E+00 - -0.5780033950E+00 -0.7142436138E+00 -0.3946747211E+00 - -0.7214667080E+00 -0.6282024384E+00 -0.2912859173E+00 - -0.7869572323E+00 -0.4742680456E+00 -0.3946747211E+00 - -0.2310472359E+00 0.7942396679E+00 0.5619613196E+00 - -0.2078725566E+00 0.8949976898E+00 0.3946747211E+00 - -0.3745105489E+00 0.8802808433E+00 0.2912859173E+00 - -0.5006731713E+00 0.7704273101E+00 0.3946747211E+00 - -0.4120079677E+00 0.7172509392E+00 0.5619613196E+00 - -0.7981815150E+00 0.4787040975E+00 
0.3657166338E+00 - -0.8359870647E+00 0.3390817753E+00 0.4314500867E+00 - -0.7443815262E+00 0.2841419580E+00 0.6042842801E+00 - -0.6374812308E+00 0.3823251622E+00 0.6689134105E+00 - -0.6012080497E+00 0.5228665124E+00 0.6042842801E+00 - -0.6928135883E+00 0.5778063297E+00 0.4314500867E+00 - -0.9629456817E+00 0.2078908386E+00 -0.1718051609E+00 - -0.9929343613E+00 0.7282159007E-01 -0.9369295664E-01 - -0.9957492651E+00 0.6526862018E-01 0.6498775488E-01 - -0.9570083487E+00 0.2337049578E+00 0.1718051609E+00 - -0.9249725723E+00 0.3683033672E+00 0.9369295664E-01 - -0.9242047653E+00 0.3763271762E+00 -0.6498775488E-01 - -0.8439420100E+00 -0.5324833101E+00 -0.6498775489E-01 - -0.8461042293E+00 -0.5247182702E+00 0.9369295664E-01 - -0.9012345902E+00 -0.3978180491E+00 0.1718051609E+00 - -0.9688969256E+00 -0.2387788502E+00 0.6498775488E-01 - -0.9648018180E+00 -0.2457215536E+00 -0.9369295664E-01 - -0.9116043454E+00 -0.3734441112E+00 -0.1718051609E+00 - -0.5606053708E+00 -0.4881357060E+00 0.6689134105E+00 - -0.6830591083E+00 -0.4102081952E+00 0.6042842801E+00 - -0.7636176477E+00 -0.4803529024E+00 0.4314500867E+00 - -0.7019263040E+00 -0.6111880296E+00 0.3657166338E+00 - -0.5808201421E+00 -0.6902889144E+00 0.4314500867E+00 - -0.5002616026E+00 -0.6201442072E+00 0.6042842801E+00 - -0.6427339198E+00 0.4522094255E-01 0.7647536693E+00 - -0.6248594999E+00 -0.1571700695E+00 0.7647536693E+00 - -0.4996310559E+00 -0.2230338127E+00 0.8370332887E+00 - -0.3889846814E+00 -0.1363384845E+00 0.9110997395E+00 - -0.4068591013E+00 0.6605252750E-01 0.9110997395E+00 - -0.5309875045E+00 0.1320134222E+00 0.8370332887E+00 - -0.6435033919E+00 -0.7626794715E+00 -0.6498775489E-01 - -0.4952825170E+00 -0.8515739633E+00 -0.1718051609E+00 - -0.3760910397E+00 -0.9218335857E+00 -0.9369295664E-01 - -0.3697775916E+00 -0.9268447143E+00 0.6498775488E-01 - -0.5179984665E+00 -0.8379502225E+00 0.1718051609E+00 - -0.6361095616E+00 -0.7658891927E+00 0.9369295664E-01 - 0.2375761442E+00 -0.9668398035E+00 0.9369295664E-01 - 0.9985064359E-01 -0.9800575675E+00 0.1718051609E+00 - -0.7231343441E-01 -0.9952624573E+00 0.6498775488E-01 - -0.6444507322E-01 -0.9935131919E+00 -0.9369295664E-01 - 0.7346522053E-01 -0.9823878298E+00 -0.1718051609E+00 - 0.2456292985E+00 -0.9671829400E+00 -0.6498775489E-01 - 0.3643672015E+00 -0.8564390733E+00 0.3657166338E+00 - 0.4770204756E+00 -0.7657037865E+00 0.4314500867E+00 - 0.4352028525E+00 -0.6674121560E+00 0.6042842801E+00 - 0.2910080573E+00 -0.6840096197E+00 0.6689134105E+00 - 0.1790543045E+00 -0.7763891195E+00 0.6042842801E+00 - 0.2208719276E+00 -0.8746807500E+00 0.4314500867E+00 - -0.2416233762E+00 -0.5973022429E+00 0.7647536693E+00 - -0.4361458581E-01 -0.6428449216E+00 0.7647536693E+00 - 0.5772327375E-01 -0.5440986099E+00 0.8370332887E+00 - 0.9462727051E-02 -0.4120773247E+00 0.9110997395E+00 - -0.1885460633E+00 -0.3665346460E+00 0.9110997395E+00 - -0.2896363881E+00 -0.4642047353E+00 0.8370332887E+00 - 0.2731140292E+00 0.4741139110E+00 0.8370332887E+00 - 0.2345574672E+00 0.3389396101E+00 0.9110997395E+00 - 0.3679806288E+00 0.1857081622E+00 0.9110997395E+00 - 0.5071734576E+00 0.2053055222E+00 0.8370332887E+00 - 0.5465628671E+00 0.3412050082E+00 0.7647536693E+00 - 0.4131397056E+00 0.4944364560E+00 0.7647536693E+00 - -0.6290668992E-01 0.4073573530E+00 0.9110997395E+00 - -0.3853193725E-01 0.5457935172E+00 0.8370332887E+00 - -0.1556080320E+00 0.6252503224E+00 0.7647536693E+00 - -0.3425698233E+00 0.5457084767E+00 0.7647536693E+00 - -0.3665122481E+00 0.4062561330E+00 0.8370332887E+00 - -0.2498684812E+00 0.3278155072E+00 
0.9110997395E+00 - 0.3948329683E+00 -0.1183393081E+00 0.9110997395E+00 - 0.2903312257E+00 -0.2925833968E+00 0.9110997395E+00 - 0.3519823723E+00 -0.4189077264E+00 0.8370332887E+00 - 0.4934024609E+00 -0.4143740303E+00 0.7647536693E+00 - 0.5979042035E+00 -0.2401299416E+00 0.7647536693E+00 - 0.5353060010E+00 -0.1132376214E+00 0.8370332887E+00 - 0.8461042293E+00 0.5247182702E+00 -0.9369295664E-01 - 0.8439420100E+00 0.5324833101E+00 0.6498775489E-01 - 0.9116043454E+00 0.3734441112E+00 0.1718051609E+00 - 0.9648018180E+00 0.2457215536E+00 0.9369295664E-01 - 0.9688969256E+00 0.2387788502E+00 -0.6498775488E-01 - 0.9012345902E+00 0.3978180491E+00 -0.1718051609E+00 - 0.9929343613E+00 -0.7282159007E-01 0.9369295664E-01 - 0.9629456817E+00 -0.2078908386E+00 0.1718051609E+00 - 0.9242047653E+00 -0.3763271762E+00 0.6498775488E-01 - 0.9249725723E+00 -0.3683033672E+00 -0.9369295664E-01 - 0.9570083487E+00 -0.2337049578E+00 -0.1718051609E+00 - 0.9957492651E+00 -0.6526862018E-01 -0.6498775488E-01 - 0.8359870647E+00 -0.3390817753E+00 -0.4314500867E+00 - 0.7981815150E+00 -0.4787040975E+00 -0.3657166338E+00 - 0.6928135883E+00 -0.5778063297E+00 -0.4314500867E+00 - 0.6012080497E+00 -0.5228665124E+00 -0.6042842801E+00 - 0.6374812308E+00 -0.3823251622E+00 -0.6689134105E+00 - 0.7443815262E+00 -0.2841419580E+00 -0.6042842801E+00 - 0.6427339198E+00 -0.4522094255E-01 -0.7647536693E+00 - 0.5309875045E+00 -0.1320134222E+00 -0.8370332887E+00 - 0.4068591013E+00 -0.6605252750E-01 -0.9110997395E+00 - 0.3889846814E+00 0.1363384845E+00 -0.9110997395E+00 - 0.4996310559E+00 0.2230338127E+00 -0.8370332887E+00 - 0.6248594999E+00 0.1571700695E+00 -0.7647536693E+00 - 0.6830591083E+00 0.4102081952E+00 -0.6042842801E+00 - 0.5606053708E+00 0.4881357060E+00 -0.6689134105E+00 - 0.5002616026E+00 0.6201442072E+00 -0.6042842801E+00 - 0.5808201421E+00 0.6902889144E+00 -0.4314500867E+00 - 0.7019263040E+00 0.6111880296E+00 -0.3657166338E+00 - 0.7636176477E+00 0.4803529024E+00 -0.4314500867E+00 - 0.6435033919E+00 0.7626794715E+00 0.6498775489E-01 - 0.6361095616E+00 0.7658891927E+00 -0.9369295664E-01 - 0.5179984665E+00 0.8379502225E+00 -0.1718051609E+00 - 0.3697775916E+00 0.9268447143E+00 -0.6498775488E-01 - 0.3760910397E+00 0.9218335857E+00 0.9369295664E-01 - 0.4952825170E+00 0.8515739633E+00 0.1718051609E+00 - 0.2416233762E+00 0.5973022429E+00 -0.7647536693E+00 - 0.2896363881E+00 0.4642047353E+00 -0.8370332887E+00 - 0.1885460633E+00 0.3665346460E+00 -0.9110997395E+00 - -0.9462727051E-02 0.4120773247E+00 -0.9110997395E+00 - -0.5772327375E-01 0.5440986099E+00 -0.8370332887E+00 - 0.4361458581E-01 0.6428449216E+00 -0.7647536693E+00 - -0.4770204756E+00 0.7657037865E+00 -0.4314500867E+00 - -0.3643672015E+00 0.8564390733E+00 -0.3657166338E+00 - -0.2208719276E+00 0.8746807500E+00 -0.4314500867E+00 - -0.1790543045E+00 0.7763891195E+00 -0.6042842801E+00 - -0.2910080573E+00 0.6840096197E+00 -0.6689134105E+00 - -0.4352028525E+00 0.6674121560E+00 -0.6042842801E+00 - -0.9985064359E-01 0.9800575675E+00 -0.1718051609E+00 - -0.2375761442E+00 0.9668398035E+00 -0.9369295664E-01 - -0.2456292985E+00 0.9671829400E+00 0.6498775489E-01 - -0.7346522053E-01 0.9823878298E+00 0.1718051609E+00 - 0.6444507322E-01 0.9935131919E+00 0.9369295664E-01 - 0.7231343441E-01 0.9952624573E+00 -0.6498775488E-01 - -0.1666203030E+00 -0.7244256510E+00 -0.6689134105E+00 - -0.4020851882E-01 -0.7957535949E+00 -0.6042842801E+00 - -0.6415172192E-01 -0.8998529765E+00 -0.4314500867E+00 - -0.2086229985E+00 -0.9070434324E+00 -0.3657166338E+00 - -0.3354353023E+00 -0.8374568530E+00 -0.4314500867E+00 - 
-0.3114920992E+00 -0.7333574715E+00 -0.6042842801E+00 - 0.3853193725E-01 -0.5457935172E+00 -0.8370332887E+00 - 0.6290668992E-01 -0.4073573530E+00 -0.9110997395E+00 - 0.2498684812E+00 -0.3278155072E+00 -0.9110997395E+00 - 0.3665122481E+00 -0.4062561330E+00 -0.8370332887E+00 - 0.3425698233E+00 -0.5457084767E+00 -0.7647536693E+00 - 0.1556080320E+00 -0.6252503224E+00 -0.7647536693E+00 - 0.5264977972E+00 -0.8476890121E+00 -0.6498775489E-01 - 0.6568442522E+00 -0.7341924919E+00 -0.1718051609E+00 - 0.7604973159E+00 -0.6425460780E+00 -0.9369295664E-01 - 0.7672141452E+00 -0.6380901559E+00 0.6498775488E-01 - 0.6368676903E+00 -0.7515866762E+00 0.1718051609E+00 - 0.5318352427E+00 -0.8416489200E+00 0.9369295664E-01 - 0.7404582412E+00 0.6539451243E-01 0.6689134105E+00 - 0.7937207543E+00 -0.6962666910E-01 0.6042842801E+00 - 0.9001240061E+00 -0.6022953042E-01 0.4314500867E+00 - 0.9271176189E+00 0.8187957304E-01 0.3657166338E+00 - 0.8756350093E+00 0.2170579490E+00 0.4314500867E+00 - 0.7692317574E+00 0.2076608103E+00 0.6042842801E+00 - -0.3948329683E+00 0.1183393081E+00 -0.9110997395E+00 - -0.5353060010E+00 0.1132376214E+00 -0.8370332887E+00 - -0.5979042035E+00 0.2401299416E+00 -0.7647536693E+00 - -0.4934024609E+00 0.4143740303E+00 -0.7647536693E+00 - -0.3519823723E+00 0.4189077264E+00 -0.8370332887E+00 - -0.2903312257E+00 0.2925833968E+00 -0.9110997395E+00 - -0.7937207543E+00 0.6962666910E-01 -0.6042842801E+00 - -0.7404582412E+00 -0.6539451243E-01 -0.6689134105E+00 - -0.7692317574E+00 -0.2076608103E+00 -0.6042842801E+00 - -0.8756350093E+00 -0.2170579490E+00 -0.4314500867E+00 - -0.9271176189E+00 -0.8187957304E-01 -0.3657166338E+00 - -0.9001240061E+00 0.6022953042E-01 -0.4314500867E+00 - -0.5264977972E+00 0.8476890121E+00 0.6498775489E-01 - -0.5318352427E+00 0.8416489200E+00 -0.9369295664E-01 - -0.6368676903E+00 0.7515866762E+00 -0.1718051609E+00 - -0.7672141452E+00 0.6380901559E+00 -0.6498775488E-01 - -0.7604973159E+00 0.6425460780E+00 0.9369295664E-01 - -0.6568442522E+00 0.7341924919E+00 0.1718051609E+00 - -0.2345574672E+00 -0.3389396101E+00 -0.9110997395E+00 - -0.2731140292E+00 -0.4741139110E+00 -0.8370332887E+00 - -0.4131397056E+00 -0.4944364560E+00 -0.7647536693E+00 - -0.5465628671E+00 -0.3412050082E+00 -0.7647536693E+00 - -0.5071734576E+00 -0.2053055222E+00 -0.8370332887E+00 - -0.3679806288E+00 -0.1857081622E+00 -0.9110997395E+00 - 0.4020851882E-01 0.7957535949E+00 0.6042842801E+00 - 0.1666203030E+00 0.7244256510E+00 0.6689134105E+00 - 0.3114920992E+00 0.7333574715E+00 0.6042842801E+00 - 0.3354353023E+00 0.8374568530E+00 0.4314500867E+00 - 0.2086229985E+00 0.9070434324E+00 0.3657166338E+00 - 0.6415172192E-01 0.8998529765E+00 0.4314500867E+00 - -0.8267643340E+00 0.2569457569E-01 0.5619613196E+00 - -0.7591065523E+00 0.1081792529E+00 0.6419147073E+00 - -0.7443815262E+00 0.2841419580E+00 0.6042842801E+00 - -0.8359870647E+00 0.3390817753E+00 0.4314500867E+00 - -0.9164945828E+00 0.2025716858E+00 0.3449672329E+00 - -0.9154295376E+00 0.7887094654E-01 0.3946747211E+00 - -0.9154295376E+00 0.7887094654E-01 0.3946747211E+00 - -0.9164945828E+00 0.2025716858E+00 0.3449672329E+00 - -0.9570083487E+00 0.2337049578E+00 0.1718051609E+00 - -0.9957492651E+00 0.6526862018E-01 0.6498775488E-01 - -0.9830555860E+00 -0.8681980583E-01 0.1614436008E+00 - -0.9529269564E+00 -0.8415895755E-01 0.2912859173E+00 - -0.9529269564E+00 -0.8415895755E-01 0.2912859173E+00 - -0.9830555860E+00 -0.8681980583E-01 0.1614436008E+00 - -0.9688969256E+00 -0.2387788502E+00 0.6498775488E-01 - -0.9012345902E+00 -0.3978180491E+00 0.1718051609E+00 - 
-0.8668045177E+00 -0.3600660166E+00 0.3449672329E+00 - -0.8874364321E+00 -0.2380933503E+00 0.3946747211E+00 - -0.8874364321E+00 -0.2380933503E+00 0.3946747211E+00 - -0.8668045177E+00 -0.3600660166E+00 0.3449672329E+00 - -0.7636176477E+00 -0.4803529024E+00 0.4314500867E+00 - -0.6830591083E+00 -0.4102081952E+00 0.6042842801E+00 - -0.7283964032E+00 -0.2395499705E+00 0.6419147073E+00 - -0.8094636434E+00 -0.1702001330E+00 0.5619613196E+00 - -0.8094636434E+00 -0.1702001330E+00 0.5619613196E+00 - -0.7283964032E+00 -0.2395499705E+00 0.6419147073E+00 - -0.6248594999E+00 -0.1571700695E+00 0.7647536693E+00 - -0.6427339198E+00 0.4522094255E-01 0.7647536693E+00 - -0.7591065523E+00 0.1081792529E+00 0.6419147073E+00 - -0.8267643340E+00 0.2569457569E-01 0.5619613196E+00 - -0.8826807659E-01 -0.8224404063E+00 0.5619613196E+00 - 0.2738693219E-02 -0.7667711576E+00 0.6419147073E+00 - -0.4361458581E-01 -0.6428449216E+00 0.7647536693E+00 - -0.2416233762E+00 -0.5973022429E+00 0.7647536693E+00 - -0.3374614086E+00 -0.6885240056E+00 0.6419147073E+00 - -0.2799212232E+00 -0.7783595468E+00 0.5619613196E+00 - -0.2799212232E+00 -0.7783595468E+00 0.5619613196E+00 - -0.3374614086E+00 -0.6885240056E+00 0.6419147073E+00 - -0.5002616026E+00 -0.6201442072E+00 0.6042842801E+00 - -0.5808201421E+00 -0.6902889144E+00 0.4314500867E+00 - -0.4758695232E+00 -0.8090400517E+00 0.3449672329E+00 - -0.3578940119E+00 -0.8462527641E+00 0.3946747211E+00 - -0.3578940119E+00 -0.8462527641E+00 0.3946747211E+00 - -0.4758695232E+00 -0.8090400517E+00 0.3449672329E+00 - -0.5179984665E+00 -0.8379502225E+00 0.1718051609E+00 - -0.3697775916E+00 -0.9268447143E+00 0.6498775488E-01 - -0.2212103404E+00 -0.9617702164E+00 0.1614436008E+00 - -0.2144306989E+00 -0.9322939395E+00 0.2912859173E+00 - -0.2144306989E+00 -0.9322939395E+00 0.2912859173E+00 - -0.2212103404E+00 -0.9617702164E+00 0.1614436008E+00 - -0.7231343441E-01 -0.9952624573E+00 0.6498775488E-01 - 0.9985064359E-01 -0.9800575675E+00 0.1718051609E+00 - 0.7458580462E-01 -0.9356466032E+00 0.3449672329E+00 - -0.4779270666E-01 -0.9175770931E+00 0.3946747211E+00 - -0.4779270666E-01 -0.9175770931E+00 0.3946747211E+00 - 0.7458580462E-01 -0.9356466032E+00 0.3449672329E+00 - 0.2208719276E+00 -0.8746807500E+00 0.4314500867E+00 - 0.1790543045E+00 -0.7763891195E+00 0.6042842801E+00 - 0.2738693219E-02 -0.7667711576E+00 0.6419147073E+00 - -0.8826807659E-01 -0.8224404063E+00 0.5619613196E+00 - 0.6549052426E-01 -0.1539343928E+00 0.9859083091E+00 - 0.1162510682E+00 -0.2732462108E+00 0.9548938147E+00 - 0.2903312257E+00 -0.2925833968E+00 0.9110997395E+00 - 0.3948329683E+00 -0.1183393081E+00 0.9110997395E+00 - 0.2957961450E+00 0.2612361319E-01 0.9548938147E+00 - 0.1666379923E+00 0.1471684647E-01 0.9859083091E+00 - 0.1666379923E+00 0.1471684647E-01 0.9859083091E+00 - 0.2957961450E+00 0.2612361319E-01 0.9548938147E+00 - 0.3679806288E+00 0.1857081622E+00 0.9110997395E+00 - 0.2345574672E+00 0.3389396101E+00 0.9110997395E+00 - 0.6656100312E-01 0.2893914916E+00 0.9548938147E+00 - 0.3749741879E-01 0.1630299041E+00 0.9859083091E+00 - 0.3749741879E-01 0.1630299041E+00 0.9859083091E+00 - 0.6656100312E-01 0.2893914916E+00 0.9548938147E+00 - -0.6290668992E-01 0.4073573530E+00 0.9110997395E+00 - -0.2498684812E+00 0.3278155072E+00 0.9110997395E+00 - -0.2546591828E+00 0.1527301647E+00 0.9548938147E+00 - -0.1434633130E+00 0.8604117543E-01 0.9859083091E+00 - -0.1434633130E+00 0.8604117543E-01 0.9859083091E+00 - -0.2546591828E+00 0.1527301647E+00 0.9548938147E+00 - -0.4068591013E+00 0.6605252750E-01 0.9110997395E+00 - -0.3889846814E+00 
-0.1363384845E+00 0.9110997395E+00 - -0.2239490336E+00 -0.1949990588E+00 0.9548938147E+00 - -0.1261626223E+00 -0.1098535332E+00 0.9859083091E+00 - -0.1261626223E+00 -0.1098535332E+00 0.9859083091E+00 - -0.2239490336E+00 -0.1949990588E+00 0.9548938147E+00 - -0.1885460633E+00 -0.3665346460E+00 0.9110997395E+00 - 0.9462727051E-02 -0.4120773247E+00 0.9110997395E+00 - 0.1162510682E+00 -0.2732462108E+00 0.9548938147E+00 - 0.6549052426E-01 -0.1539343928E+00 0.9859083091E+00 - 0.8094636434E+00 0.1702001330E+00 -0.5619613196E+00 - 0.7283964032E+00 0.2395499705E+00 -0.6419147073E+00 - 0.6830591083E+00 0.4102081952E+00 -0.6042842801E+00 - 0.7636176477E+00 0.4803529024E+00 -0.4314500867E+00 - 0.8668045177E+00 0.3600660166E+00 -0.3449672329E+00 - 0.8874364321E+00 0.2380933503E+00 -0.3946747211E+00 - 0.8874364321E+00 0.2380933503E+00 -0.3946747211E+00 - 0.8668045177E+00 0.3600660166E+00 -0.3449672329E+00 - 0.9012345902E+00 0.3978180491E+00 -0.1718051609E+00 - 0.9688969256E+00 0.2387788502E+00 -0.6498775488E-01 - 0.9830555860E+00 0.8681980583E-01 -0.1614436008E+00 - 0.9529269564E+00 0.8415895755E-01 -0.2912859173E+00 - 0.9529269564E+00 0.8415895755E-01 -0.2912859173E+00 - 0.9830555860E+00 0.8681980583E-01 -0.1614436008E+00 - 0.9957492651E+00 -0.6526862018E-01 -0.6498775488E-01 - 0.9570083487E+00 -0.2337049578E+00 -0.1718051609E+00 - 0.9164945828E+00 -0.2025716858E+00 -0.3449672329E+00 - 0.9154295376E+00 -0.7887094654E-01 -0.3946747211E+00 - 0.9154295376E+00 -0.7887094654E-01 -0.3946747211E+00 - 0.9164945828E+00 -0.2025716858E+00 -0.3449672329E+00 - 0.8359870647E+00 -0.3390817753E+00 -0.4314500867E+00 - 0.7443815262E+00 -0.2841419580E+00 -0.6042842801E+00 - 0.7591065523E+00 -0.1081792529E+00 -0.6419147073E+00 - 0.8267643340E+00 -0.2569457569E-01 -0.5619613196E+00 - 0.8267643340E+00 -0.2569457569E-01 -0.5619613196E+00 - 0.7591065523E+00 -0.1081792529E+00 -0.6419147073E+00 - 0.6427339198E+00 -0.4522094255E-01 -0.7647536693E+00 - 0.6248594999E+00 0.1571700695E+00 -0.7647536693E+00 - 0.7283964032E+00 0.2395499705E+00 -0.6419147073E+00 - 0.8094636434E+00 0.1702001330E+00 -0.5619613196E+00 - 0.2144306989E+00 0.9322939395E+00 -0.2912859173E+00 - 0.2212103404E+00 0.9617702164E+00 -0.1614436008E+00 - 0.3697775916E+00 0.9268447143E+00 -0.6498775488E-01 - 0.5179984665E+00 0.8379502225E+00 -0.1718051609E+00 - 0.4758695232E+00 0.8090400517E+00 -0.3449672329E+00 - 0.3578940119E+00 0.8462527641E+00 -0.3946747211E+00 - 0.3578940119E+00 0.8462527641E+00 -0.3946747211E+00 - 0.4758695232E+00 0.8090400517E+00 -0.3449672329E+00 - 0.5808201421E+00 0.6902889144E+00 -0.4314500867E+00 - 0.5002616026E+00 0.6201442072E+00 -0.6042842801E+00 - 0.3374614086E+00 0.6885240056E+00 -0.6419147073E+00 - 0.2799212232E+00 0.7783595468E+00 -0.5619613196E+00 - 0.2799212232E+00 0.7783595468E+00 -0.5619613196E+00 - 0.3374614086E+00 0.6885240056E+00 -0.6419147073E+00 - 0.2416233762E+00 0.5973022429E+00 -0.7647536693E+00 - 0.4361458581E-01 0.6428449216E+00 -0.7647536693E+00 - -0.2738693219E-02 0.7667711576E+00 -0.6419147073E+00 - 0.8826807659E-01 0.8224404063E+00 -0.5619613196E+00 - 0.8826807659E-01 0.8224404063E+00 -0.5619613196E+00 - -0.2738693219E-02 0.7667711576E+00 -0.6419147073E+00 - -0.1790543045E+00 0.7763891195E+00 -0.6042842801E+00 - -0.2208719276E+00 0.8746807500E+00 -0.4314500867E+00 - -0.7458580462E-01 0.9356466032E+00 -0.3449672329E+00 - 0.4779270666E-01 0.9175770931E+00 -0.3946747211E+00 - 0.4779270666E-01 0.9175770931E+00 -0.3946747211E+00 - -0.7458580462E-01 0.9356466032E+00 -0.3449672329E+00 - -0.9985064359E-01 0.9800575675E+00 
-0.1718051609E+00 - 0.7231343441E-01 0.9952624573E+00 -0.6498775488E-01 - 0.2212103404E+00 0.9617702164E+00 -0.1614436008E+00 - 0.2144306989E+00 0.9322939395E+00 -0.2912859173E+00 - 0.2078725566E+00 -0.8949976898E+00 -0.3946747211E+00 - 0.9055527954E-01 -0.9342362386E+00 -0.3449672329E+00 - -0.6415172192E-01 -0.8998529765E+00 -0.4314500867E+00 - -0.4020851882E-01 -0.7957535949E+00 -0.6042842801E+00 - 0.1316922418E+00 -0.7553824608E+00 -0.6419147073E+00 - 0.2310472359E+00 -0.7942396679E+00 -0.5619613196E+00 - 0.2310472359E+00 -0.7942396679E+00 -0.5619613196E+00 - 0.1316922418E+00 -0.7553824608E+00 -0.6419147073E+00 - 0.1556080320E+00 -0.6252503224E+00 -0.7647536693E+00 - 0.3425698233E+00 -0.5457084767E+00 -0.7647536693E+00 - 0.4529124277E+00 -0.6187211338E+00 -0.6419147073E+00 - 0.4120079677E+00 -0.7172509392E+00 -0.5619613196E+00 - 0.4120079677E+00 -0.7172509392E+00 -0.5619613196E+00 - 0.4529124277E+00 -0.6187211338E+00 -0.6419147073E+00 - 0.6012080497E+00 -0.5228665124E+00 -0.6042842801E+00 - 0.6928135883E+00 -0.5778063297E+00 -0.4314500867E+00 - 0.6103004582E+00 -0.7131135667E+00 -0.3449672329E+00 - 0.5006731713E+00 -0.7704273101E+00 -0.3946747211E+00 - 0.5006731713E+00 -0.7704273101E+00 -0.3946747211E+00 - 0.6103004582E+00 -0.7131135667E+00 -0.3449672329E+00 - 0.6568442522E+00 -0.7341924919E+00 -0.1718051609E+00 - 0.5264977972E+00 -0.8476890121E+00 -0.6498775489E-01 - 0.3863514246E+00 -0.9081126255E+00 -0.1614436008E+00 - 0.3745105489E+00 -0.8802808433E+00 -0.2912859173E+00 - 0.3745105489E+00 -0.8802808433E+00 -0.2912859173E+00 - 0.3863514246E+00 -0.9081126255E+00 -0.1614436008E+00 - 0.2456292985E+00 -0.9671829400E+00 -0.6498775489E-01 - 0.7346522053E-01 -0.9823878298E+00 -0.1718051609E+00 - 0.9055527954E-01 -0.9342362386E+00 -0.3449672329E+00 - 0.2078725566E+00 -0.8949976898E+00 -0.3946747211E+00 - 0.6942388739E+00 -0.6018839178E+00 0.3946747211E+00 - 0.6223910433E+00 -0.7025859360E+00 0.3449672329E+00 - 0.6368676903E+00 -0.7515866762E+00 0.1718051609E+00 - 0.7672141452E+00 -0.6380901559E+00 0.6498775488E-01 - 0.8463400769E+00 -0.5075868772E+00 0.1614436008E+00 - 0.8204014962E+00 -0.4920303846E+00 0.2912859173E+00 - 0.8204014962E+00 -0.4920303846E+00 0.2912859173E+00 - 0.8463400769E+00 -0.5075868772E+00 0.1614436008E+00 - 0.9242047653E+00 -0.3763271762E+00 0.6498775488E-01 - 0.9629456817E+00 -0.2078908386E+00 0.1718051609E+00 - 0.9129010801E+00 -0.2181953856E+00 0.3449672329E+00 - 0.8578989150E+00 -0.3290004805E+00 0.3946747211E+00 - 0.8578989150E+00 -0.3290004805E+00 0.3946747211E+00 - 0.9129010801E+00 -0.2181953856E+00 0.3449672329E+00 - 0.9001240061E+00 -0.6022953042E-01 0.4314500867E+00 - 0.7937207543E+00 -0.6962666910E-01 0.6042842801E+00 - 0.7300890087E+00 -0.2343406665E+00 0.6419147073E+00 - 0.7549109720E+00 -0.3380959918E+00 0.5619613196E+00 - 0.7549109720E+00 -0.3380959918E+00 0.5619613196E+00 - 0.7300890087E+00 -0.2343406665E+00 0.6419147073E+00 - 0.5979042035E+00 -0.2401299416E+00 0.7647536693E+00 - 0.4934024609E+00 -0.4143740303E+00 0.7647536693E+00 - 0.5505439319E+00 -0.5337104904E+00 0.6419147073E+00 - 0.6537635039E+00 -0.5067472311E+00 0.5619613196E+00 - 0.6537635039E+00 -0.5067472311E+00 0.5619613196E+00 - 0.5505439319E+00 -0.5337104904E+00 0.6419147073E+00 - 0.4352028525E+00 -0.6674121560E+00 0.6042842801E+00 - 0.4770204756E+00 -0.7657037865E+00 0.4314500867E+00 - 0.6223910433E+00 -0.7025859360E+00 0.3449672329E+00 - 0.6942388739E+00 -0.6018839178E+00 0.3946747211E+00 - -0.6942388739E+00 0.6018839178E+00 -0.3946747211E+00 - -0.6223910433E+00 0.7025859360E+00 
-0.3449672329E+00 - -0.4770204756E+00 0.7657037865E+00 -0.4314500867E+00 - -0.4352028525E+00 0.6674121560E+00 -0.6042842801E+00 - -0.5505439319E+00 0.5337104904E+00 -0.6419147073E+00 - -0.6537635039E+00 0.5067472311E+00 -0.5619613196E+00 - -0.6537635039E+00 0.5067472311E+00 -0.5619613196E+00 - -0.5505439319E+00 0.5337104904E+00 -0.6419147073E+00 - -0.4934024609E+00 0.4143740303E+00 -0.7647536693E+00 - -0.5979042035E+00 0.2401299416E+00 -0.7647536693E+00 - -0.7300890087E+00 0.2343406665E+00 -0.6419147073E+00 - -0.7549109720E+00 0.3380959918E+00 -0.5619613196E+00 - -0.7549109720E+00 0.3380959918E+00 -0.5619613196E+00 - -0.7300890087E+00 0.2343406665E+00 -0.6419147073E+00 - -0.7937207543E+00 0.6962666910E-01 -0.6042842801E+00 - -0.9001240061E+00 0.6022953042E-01 -0.4314500867E+00 - -0.9129010801E+00 0.2181953856E+00 -0.3449672329E+00 - -0.8578989150E+00 0.3290004805E+00 -0.3946747211E+00 - -0.8578989150E+00 0.3290004805E+00 -0.3946747211E+00 - -0.9129010801E+00 0.2181953856E+00 -0.3449672329E+00 - -0.9629456817E+00 0.2078908386E+00 -0.1718051609E+00 - -0.9242047653E+00 0.3763271762E+00 -0.6498775488E-01 - -0.8463400769E+00 0.5075868772E+00 -0.1614436008E+00 - -0.8204014962E+00 0.4920303846E+00 -0.2912859173E+00 - -0.8204014962E+00 0.4920303846E+00 -0.2912859173E+00 - -0.8463400769E+00 0.5075868772E+00 -0.1614436008E+00 - -0.7672141452E+00 0.6380901559E+00 -0.6498775488E-01 - -0.6368676903E+00 0.7515866762E+00 -0.1718051609E+00 - -0.6223910433E+00 0.7025859360E+00 -0.3449672329E+00 - -0.6942388739E+00 0.6018839178E+00 -0.3946747211E+00 - -0.6549052426E-01 0.1539343928E+00 -0.9859083091E+00 - -0.1162510682E+00 0.2732462108E+00 -0.9548938147E+00 - -0.9462727051E-02 0.4120773247E+00 -0.9110997395E+00 - 0.1885460633E+00 0.3665346460E+00 -0.9110997395E+00 - 0.2239490336E+00 0.1949990588E+00 -0.9548938147E+00 - 0.1261626223E+00 0.1098535332E+00 -0.9859083091E+00 - 0.1261626223E+00 0.1098535332E+00 -0.9859083091E+00 - 0.2239490336E+00 0.1949990588E+00 -0.9548938147E+00 - 0.3889846814E+00 0.1363384845E+00 -0.9110997395E+00 - 0.4068591013E+00 -0.6605252750E-01 -0.9110997395E+00 - 0.2546591828E+00 -0.1527301647E+00 -0.9548938147E+00 - 0.1434633130E+00 -0.8604117543E-01 -0.9859083091E+00 - 0.1434633130E+00 -0.8604117543E-01 -0.9859083091E+00 - 0.2546591828E+00 -0.1527301647E+00 -0.9548938147E+00 - 0.2498684812E+00 -0.3278155072E+00 -0.9110997395E+00 - 0.6290668992E-01 -0.4073573530E+00 -0.9110997395E+00 - -0.6656100312E-01 -0.2893914916E+00 -0.9548938147E+00 - -0.3749741879E-01 -0.1630299041E+00 -0.9859083091E+00 - -0.3749741879E-01 -0.1630299041E+00 -0.9859083091E+00 - -0.6656100312E-01 -0.2893914916E+00 -0.9548938147E+00 - -0.2345574672E+00 -0.3389396101E+00 -0.9110997395E+00 - -0.3679806288E+00 -0.1857081622E+00 -0.9110997395E+00 - -0.2957961450E+00 -0.2612361319E-01 -0.9548938147E+00 - -0.1666379923E+00 -0.1471684647E-01 -0.9859083091E+00 - -0.1666379923E+00 -0.1471684647E-01 -0.9859083091E+00 - -0.2957961450E+00 -0.2612361319E-01 -0.9548938147E+00 - -0.3948329683E+00 0.1183393081E+00 -0.9110997395E+00 - -0.2903312257E+00 0.2925833968E+00 -0.9110997395E+00 - -0.1162510682E+00 0.2732462108E+00 -0.9548938147E+00 - -0.6549052426E-01 0.1539343928E+00 -0.9859083091E+00 - 0.5780033950E+00 0.7142436138E+00 0.3946747211E+00 - 0.4896180912E+00 0.8007944387E+00 0.3449672329E+00 - 0.3354353023E+00 0.8374568530E+00 0.4314500867E+00 - 0.3114920992E+00 0.7333574715E+00 0.6042842801E+00 - 0.4484811290E+00 0.6219406608E+00 0.6419147073E+00 - 0.5548287157E+00 0.6134855919E+00 0.5619613196E+00 - 0.5548287157E+00 
0.6134855919E+00 0.5619613196E+00 - 0.4484811290E+00 0.6219406608E+00 0.6419147073E+00 - 0.4131397056E+00 0.4944364560E+00 0.7647536693E+00 - 0.5465628671E+00 0.3412050082E+00 0.7647536693E+00 - 0.6777162709E+00 0.3586727823E+00 0.6419147073E+00 - 0.6839692892E+00 0.4651725343E+00 0.5619613196E+00 - 0.6839692892E+00 0.4651725343E+00 0.5619613196E+00 - 0.6777162709E+00 0.3586727823E+00 0.6419147073E+00 - 0.7692317574E+00 0.2076608103E+00 0.6042842801E+00 - 0.8756350093E+00 0.2170579490E+00 0.4314500867E+00 - 0.8605283422E+00 0.3748180632E+00 0.3449672329E+00 - 0.7869572323E+00 0.4742680456E+00 0.3946747211E+00 - 0.7869572323E+00 0.4742680456E+00 0.3946747211E+00 - 0.8605283422E+00 0.3748180632E+00 0.3449672329E+00 - 0.9116043454E+00 0.3734441112E+00 0.1718051609E+00 - 0.8439420100E+00 0.5324833101E+00 0.6498775489E-01 - 0.7442772740E+00 0.6480642740E+00 0.1614436008E+00 - 0.7214667080E+00 0.6282024384E+00 0.2912859173E+00 - 0.7214667080E+00 0.6282024384E+00 0.2912859173E+00 - 0.7442772740E+00 0.6480642740E+00 0.1614436008E+00 - 0.6435033919E+00 0.7626794715E+00 0.6498775489E-01 - 0.4952825170E+00 0.8515739633E+00 0.1718051609E+00 - 0.4896180912E+00 0.8007944387E+00 0.3449672329E+00 - 0.5780033950E+00 0.7142436138E+00 0.3946747211E+00 - -0.5780033950E+00 -0.7142436138E+00 -0.3946747211E+00 - -0.4896180912E+00 -0.8007944387E+00 -0.3449672329E+00 - -0.4952825170E+00 -0.8515739633E+00 -0.1718051609E+00 - -0.6435033919E+00 -0.7626794715E+00 -0.6498775489E-01 - -0.7442772740E+00 -0.6480642740E+00 -0.1614436008E+00 - -0.7214667080E+00 -0.6282024384E+00 -0.2912859173E+00 - -0.7214667080E+00 -0.6282024384E+00 -0.2912859173E+00 - -0.7442772740E+00 -0.6480642740E+00 -0.1614436008E+00 - -0.8439420100E+00 -0.5324833101E+00 -0.6498775489E-01 - -0.9116043454E+00 -0.3734441112E+00 -0.1718051609E+00 - -0.8605283422E+00 -0.3748180632E+00 -0.3449672329E+00 - -0.7869572323E+00 -0.4742680456E+00 -0.3946747211E+00 - -0.7869572323E+00 -0.4742680456E+00 -0.3946747211E+00 - -0.8605283422E+00 -0.3748180632E+00 -0.3449672329E+00 - -0.8756350093E+00 -0.2170579490E+00 -0.4314500867E+00 - -0.7692317574E+00 -0.2076608103E+00 -0.6042842801E+00 - -0.6777162709E+00 -0.3586727823E+00 -0.6419147073E+00 - -0.6839692892E+00 -0.4651725343E+00 -0.5619613196E+00 - -0.6839692892E+00 -0.4651725343E+00 -0.5619613196E+00 - -0.6777162709E+00 -0.3586727823E+00 -0.6419147073E+00 - -0.5465628671E+00 -0.3412050082E+00 -0.7647536693E+00 - -0.4131397056E+00 -0.4944364560E+00 -0.7647536693E+00 - -0.4484811290E+00 -0.6219406608E+00 -0.6419147073E+00 - -0.5548287157E+00 -0.6134855919E+00 -0.5619613196E+00 - -0.5548287157E+00 -0.6134855919E+00 -0.5619613196E+00 - -0.4484811290E+00 -0.6219406608E+00 -0.6419147073E+00 - -0.3114920992E+00 -0.7333574715E+00 -0.6042842801E+00 - -0.3354353023E+00 -0.8374568530E+00 -0.4314500867E+00 - -0.4896180912E+00 -0.8007944387E+00 -0.3449672329E+00 - -0.5780033950E+00 -0.7142436138E+00 -0.3946747211E+00 - -0.2078725566E+00 0.8949976898E+00 0.3946747211E+00 - -0.9055527954E-01 0.9342362386E+00 0.3449672329E+00 - -0.7346522053E-01 0.9823878298E+00 0.1718051609E+00 - -0.2456292985E+00 0.9671829400E+00 0.6498775489E-01 - -0.3863514246E+00 0.9081126255E+00 0.1614436008E+00 - -0.3745105489E+00 0.8802808433E+00 0.2912859173E+00 - -0.3745105489E+00 0.8802808433E+00 0.2912859173E+00 - -0.3863514246E+00 0.9081126255E+00 0.1614436008E+00 - -0.5264977972E+00 0.8476890121E+00 0.6498775489E-01 - -0.6568442522E+00 0.7341924919E+00 0.1718051609E+00 - -0.6103004582E+00 0.7131135667E+00 0.3449672329E+00 - -0.5006731713E+00 
0.7704273101E+00 0.3946747211E+00 - -0.5006731713E+00 0.7704273101E+00 0.3946747211E+00 - -0.6103004582E+00 0.7131135667E+00 0.3449672329E+00 - -0.6928135883E+00 0.5778063297E+00 0.4314500867E+00 - -0.6012080497E+00 0.5228665124E+00 0.6042842801E+00 - -0.4529124277E+00 0.6187211338E+00 0.6419147073E+00 - -0.4120079677E+00 0.7172509392E+00 0.5619613196E+00 - -0.4120079677E+00 0.7172509392E+00 0.5619613196E+00 - -0.4529124277E+00 0.6187211338E+00 0.6419147073E+00 - -0.3425698233E+00 0.5457084767E+00 0.7647536693E+00 - -0.1556080320E+00 0.6252503224E+00 0.7647536693E+00 - -0.1316922418E+00 0.7553824608E+00 0.6419147073E+00 - -0.2310472359E+00 0.7942396679E+00 0.5619613196E+00 - -0.2310472359E+00 0.7942396679E+00 0.5619613196E+00 - -0.1316922418E+00 0.7553824608E+00 0.6419147073E+00 - 0.4020851882E-01 0.7957535949E+00 0.6042842801E+00 - 0.6415172192E-01 0.8998529765E+00 0.4314500867E+00 - -0.9055527954E-01 0.9342362386E+00 0.3449672329E+00 - -0.2078725566E+00 0.8949976898E+00 0.3946747211E+00 - -0.6928135883E+00 0.5778063297E+00 0.4314500867E+00 - -0.6103004582E+00 0.7131135667E+00 0.3449672329E+00 - -0.6568442522E+00 0.7341924919E+00 0.1718051609E+00 - -0.7604973159E+00 0.6425460780E+00 0.9369295664E-01 - -0.8423651527E+00 0.5052029426E+00 0.1875924741E+00 - -0.7981815150E+00 0.4787040975E+00 0.3657166338E+00 - -0.8423651527E+00 0.5052029426E+00 0.1875924741E+00 - -0.9249725723E+00 0.3683033672E+00 0.9369295664E-01 - -0.9570083487E+00 0.2337049578E+00 0.1718051609E+00 - -0.9164945828E+00 0.2025716858E+00 0.3449672329E+00 - -0.8359870647E+00 0.3390817753E+00 0.4314500867E+00 - -0.7981815150E+00 0.4787040975E+00 0.3657166338E+00 - -0.5309875045E+00 0.1320134222E+00 0.8370332887E+00 - -0.5206102953E+00 0.3122325898E+00 0.7946544723E+00 - -0.6374812308E+00 0.3823251622E+00 0.6689134105E+00 - -0.7443815262E+00 0.2841419580E+00 0.6042842801E+00 - -0.7591065523E+00 0.1081792529E+00 0.6419147073E+00 - -0.6427339198E+00 0.4522094255E-01 0.7647536693E+00 - -0.5206102953E+00 0.3122325898E+00 0.7946544723E+00 - -0.3665122481E+00 0.4062561330E+00 0.8370332887E+00 - -0.3425698233E+00 0.5457084767E+00 0.7647536693E+00 - -0.4529124277E+00 0.6187211338E+00 0.6419147073E+00 - -0.6012080497E+00 0.5228665124E+00 0.6042842801E+00 - -0.6374812308E+00 0.3823251622E+00 0.6689134105E+00 - -0.9629456817E+00 0.2078908386E+00 -0.1718051609E+00 - -0.9129010801E+00 0.2181953856E+00 -0.3449672329E+00 - -0.9001240061E+00 0.6022953042E-01 -0.4314500867E+00 - -0.9271176189E+00 -0.8187957304E-01 -0.3657166338E+00 - -0.9784385631E+00 -0.8641204758E-01 -0.1875924741E+00 - -0.9929343613E+00 0.7282159007E-01 -0.9369295664E-01 - -0.9784385631E+00 -0.8641204758E-01 -0.1875924741E+00 - -0.9648018180E+00 -0.2457215536E+00 -0.9369295664E-01 - -0.9688969256E+00 -0.2387788502E+00 0.6498775488E-01 - -0.9830555860E+00 -0.8681980583E-01 0.1614436008E+00 - -0.9957492651E+00 0.6526862018E-01 0.6498775488E-01 - -0.9929343613E+00 0.7282159007E-01 -0.9369295664E-01 - -0.7604973159E+00 0.6425460780E+00 0.9369295664E-01 - -0.7672141452E+00 0.6380901559E+00 -0.6498775488E-01 - -0.8463400769E+00 0.5075868772E+00 -0.1614436008E+00 - -0.9242047653E+00 0.3763271762E+00 -0.6498775488E-01 - -0.9249725723E+00 0.3683033672E+00 0.9369295664E-01 - -0.8423651527E+00 0.5052029426E+00 0.1875924741E+00 - -0.6361095616E+00 -0.7658891927E+00 0.9369295664E-01 - -0.7407816983E+00 -0.6450205727E+00 0.1875924741E+00 - -0.8461042293E+00 -0.5247182702E+00 0.9369295664E-01 - -0.8439420100E+00 -0.5324833101E+00 -0.6498775489E-01 - -0.7442772740E+00 -0.6480642740E+00 
-0.1614436008E+00 - -0.6435033919E+00 -0.7626794715E+00 -0.6498775489E-01 - -0.7407816983E+00 -0.6450205727E+00 0.1875924741E+00 - -0.7019263040E+00 -0.6111880296E+00 0.3657166338E+00 - -0.7636176477E+00 -0.4803529024E+00 0.4314500867E+00 - -0.8668045177E+00 -0.3600660166E+00 0.3449672329E+00 - -0.9012345902E+00 -0.3978180491E+00 0.1718051609E+00 - -0.8461042293E+00 -0.5247182702E+00 0.9369295664E-01 - -0.9271176189E+00 -0.8187957304E-01 -0.3657166338E+00 - -0.8756350093E+00 -0.2170579490E+00 -0.4314500867E+00 - -0.8605283422E+00 -0.3748180632E+00 -0.3449672329E+00 - -0.9116043454E+00 -0.3734441112E+00 -0.1718051609E+00 - -0.9648018180E+00 -0.2457215536E+00 -0.9369295664E-01 - -0.9784385631E+00 -0.8641204758E-01 -0.1875924741E+00 - -0.2896363881E+00 -0.4642047353E+00 0.8370332887E+00 - -0.4578282678E+00 -0.3986446373E+00 0.7946544723E+00 - -0.5606053708E+00 -0.4881357060E+00 0.6689134105E+00 - -0.5002616026E+00 -0.6201442072E+00 0.6042842801E+00 - -0.3374614086E+00 -0.6885240056E+00 0.6419147073E+00 - -0.2416233762E+00 -0.5973022429E+00 0.7647536693E+00 - -0.4578282678E+00 -0.3986446373E+00 0.7946544723E+00 - -0.4996310559E+00 -0.2230338127E+00 0.8370332887E+00 - -0.6248594999E+00 -0.1571700695E+00 0.7647536693E+00 - -0.7283964032E+00 -0.2395499705E+00 0.6419147073E+00 - -0.6830591083E+00 -0.4102081952E+00 0.6042842801E+00 - -0.5606053708E+00 -0.4881357060E+00 0.6689134105E+00 - -0.7407816983E+00 -0.6450205727E+00 0.1875924741E+00 - -0.6361095616E+00 -0.7658891927E+00 0.9369295664E-01 - -0.5179984665E+00 -0.8379502225E+00 0.1718051609E+00 - -0.4758695232E+00 -0.8090400517E+00 0.3449672329E+00 - -0.5808201421E+00 -0.6902889144E+00 0.4314500867E+00 - -0.7019263040E+00 -0.6111880296E+00 0.3657166338E+00 - -0.5309875045E+00 0.1320134222E+00 0.8370332887E+00 - -0.4068591013E+00 0.6605252750E-01 0.9110997395E+00 - -0.2546591828E+00 0.1527301647E+00 0.9548938147E+00 - -0.2498684812E+00 0.3278155072E+00 0.9110997395E+00 - -0.3665122481E+00 0.4062561330E+00 0.8370332887E+00 - -0.5206102953E+00 0.3122325898E+00 0.7946544723E+00 - -0.4578282678E+00 -0.3986446373E+00 0.7946544723E+00 - -0.2896363881E+00 -0.4642047353E+00 0.8370332887E+00 - -0.1885460633E+00 -0.3665346460E+00 0.9110997395E+00 - -0.2239490336E+00 -0.1949990588E+00 0.9548938147E+00 - -0.3889846814E+00 -0.1363384845E+00 0.9110997395E+00 - -0.4996310559E+00 -0.2230338127E+00 0.8370332887E+00 - -0.2086229985E+00 -0.9070434324E+00 -0.3657166338E+00 - -0.2201714030E+00 -0.9572531624E+00 -0.1875924741E+00 - -0.3760910397E+00 -0.9218335857E+00 -0.9369295664E-01 - -0.4952825170E+00 -0.8515739633E+00 -0.1718051609E+00 - -0.4896180912E+00 -0.8007944387E+00 -0.3449672329E+00 - -0.3354353023E+00 -0.8374568530E+00 -0.4314500867E+00 - -0.2201714030E+00 -0.9572531624E+00 -0.1875924741E+00 - -0.6444507322E-01 -0.9935131919E+00 -0.9369295664E-01 - -0.7231343441E-01 -0.9952624573E+00 0.6498775488E-01 - -0.2212103404E+00 -0.9617702164E+00 0.1614436008E+00 - -0.3697775916E+00 -0.9268447143E+00 0.6498775488E-01 - -0.3760910397E+00 -0.9218335857E+00 -0.9369295664E-01 - -0.2201714030E+00 -0.9572531624E+00 -0.1875924741E+00 - -0.2086229985E+00 -0.9070434324E+00 -0.3657166338E+00 - -0.6415172192E-01 -0.8998529765E+00 -0.4314500867E+00 - 0.9055527954E-01 -0.9342362386E+00 -0.3449672329E+00 - 0.7346522053E-01 -0.9823878298E+00 -0.1718051609E+00 - -0.6444507322E-01 -0.9935131919E+00 -0.9369295664E-01 - 0.2456292985E+00 -0.9671829400E+00 -0.6498775489E-01 - 0.3863514246E+00 -0.9081126255E+00 -0.1614436008E+00 - 0.5264977972E+00 -0.8476890121E+00 -0.6498775489E-01 - 
0.5318352427E+00 -0.8416489200E+00 0.9369295664E-01 - 0.3845368849E+00 -0.9038475800E+00 0.1875924741E+00 - 0.2375761442E+00 -0.9668398035E+00 0.9369295664E-01 - 0.3845368849E+00 -0.9038475800E+00 0.1875924741E+00 - 0.3643672015E+00 -0.8564390733E+00 0.3657166338E+00 - 0.2208719276E+00 -0.8746807500E+00 0.4314500867E+00 - 0.7458580462E-01 -0.9356466032E+00 0.3449672329E+00 - 0.9985064359E-01 -0.9800575675E+00 0.1718051609E+00 - 0.2375761442E+00 -0.9668398035E+00 0.9369295664E-01 - 0.5318352427E+00 -0.8416489200E+00 0.9369295664E-01 - 0.6368676903E+00 -0.7515866762E+00 0.1718051609E+00 - 0.6223910433E+00 -0.7025859360E+00 0.3449672329E+00 - 0.4770204756E+00 -0.7657037865E+00 0.4314500867E+00 - 0.3643672015E+00 -0.8564390733E+00 0.3657166338E+00 - 0.3845368849E+00 -0.9038475800E+00 0.1875924741E+00 - 0.3519823723E+00 -0.4189077264E+00 0.8370332887E+00 - 0.2376568648E+00 -0.5586085251E+00 0.7946544723E+00 - 0.2910080573E+00 -0.6840096197E+00 0.6689134105E+00 - 0.4352028525E+00 -0.6674121560E+00 0.6042842801E+00 - 0.5505439319E+00 -0.5337104904E+00 0.6419147073E+00 - 0.4934024609E+00 -0.4143740303E+00 0.7647536693E+00 - 0.2376568648E+00 -0.5586085251E+00 0.7946544723E+00 - 0.5772327375E-01 -0.5440986099E+00 0.8370332887E+00 - -0.4361458581E-01 -0.6428449216E+00 0.7647536693E+00 - 0.2738693219E-02 -0.7667711576E+00 0.6419147073E+00 - 0.1790543045E+00 -0.7763891195E+00 0.6042842801E+00 - 0.2910080573E+00 -0.6840096197E+00 0.6689134105E+00 - 0.3519823723E+00 -0.4189077264E+00 0.8370332887E+00 - 0.2903312257E+00 -0.2925833968E+00 0.9110997395E+00 - 0.1162510682E+00 -0.2732462108E+00 0.9548938147E+00 - 0.9462727051E-02 -0.4120773247E+00 0.9110997395E+00 - 0.5772327375E-01 -0.5440986099E+00 0.8370332887E+00 - 0.2376568648E+00 -0.5586085251E+00 0.7946544723E+00 - 0.5353060010E+00 -0.1132376214E+00 0.8370332887E+00 - 0.6047082879E+00 0.5340558244E-01 0.7946544723E+00 - 0.5071734576E+00 0.2053055222E+00 0.8370332887E+00 - 0.3679806288E+00 0.1857081622E+00 0.9110997395E+00 - 0.2957961450E+00 0.2612361319E-01 0.9548938147E+00 - 0.3948329683E+00 -0.1183393081E+00 0.9110997395E+00 - 0.6047082879E+00 0.5340558244E-01 0.7946544723E+00 - 0.7404582412E+00 0.6539451243E-01 0.6689134105E+00 - 0.7692317574E+00 0.2076608103E+00 0.6042842801E+00 - 0.6777162709E+00 0.3586727823E+00 0.6419147073E+00 - 0.5465628671E+00 0.3412050082E+00 0.7647536693E+00 - 0.5071734576E+00 0.2053055222E+00 0.8370332887E+00 - 0.1666203030E+00 0.7244256510E+00 0.6689134105E+00 - 0.1360734104E+00 0.5916149902E+00 0.7946544723E+00 - 0.2731140292E+00 0.4741139110E+00 0.8370332887E+00 - 0.4131397056E+00 0.4944364560E+00 0.7647536693E+00 - 0.4484811290E+00 0.6219406608E+00 0.6419147073E+00 - 0.3114920992E+00 0.7333574715E+00 0.6042842801E+00 - 0.1360734104E+00 0.5916149902E+00 0.7946544723E+00 - -0.3853193725E-01 0.5457935172E+00 0.8370332887E+00 - -0.6290668992E-01 0.4073573530E+00 0.9110997395E+00 - 0.6656100312E-01 0.2893914916E+00 0.9548938147E+00 - 0.2345574672E+00 0.3389396101E+00 0.9110997395E+00 - 0.2731140292E+00 0.4741139110E+00 0.8370332887E+00 - 0.1360734104E+00 0.5916149902E+00 0.7946544723E+00 - 0.1666203030E+00 0.7244256510E+00 0.6689134105E+00 - 0.4020851882E-01 0.7957535949E+00 0.6042842801E+00 - -0.1316922418E+00 0.7553824608E+00 0.6419147073E+00 - -0.1556080320E+00 0.6252503224E+00 0.7647536693E+00 - -0.3853193725E-01 0.5457935172E+00 0.8370332887E+00 - 0.5353060010E+00 -0.1132376214E+00 0.8370332887E+00 - 0.5979042035E+00 -0.2401299416E+00 0.7647536693E+00 - 0.7300890087E+00 -0.2343406665E+00 0.6419147073E+00 - 
0.7937207543E+00 -0.6962666910E-01 0.6042842801E+00 - 0.7404582412E+00 0.6539451243E-01 0.6689134105E+00 - 0.6047082879E+00 0.5340558244E-01 0.7946544723E+00 - 0.9116043454E+00 0.3734441112E+00 0.1718051609E+00 - 0.8605283422E+00 0.3748180632E+00 0.3449672329E+00 - 0.8756350093E+00 0.2170579490E+00 0.4314500867E+00 - 0.9271176189E+00 0.8187957304E-01 0.3657166338E+00 - 0.9784385631E+00 0.8641204758E-01 0.1875924741E+00 - 0.9648018180E+00 0.2457215536E+00 0.9369295664E-01 - 0.9784385631E+00 0.8641204758E-01 0.1875924741E+00 - 0.9929343613E+00 -0.7282159007E-01 0.9369295664E-01 - 0.9957492651E+00 -0.6526862018E-01 -0.6498775488E-01 - 0.9830555860E+00 0.8681980583E-01 -0.1614436008E+00 - 0.9688969256E+00 0.2387788502E+00 -0.6498775488E-01 - 0.9648018180E+00 0.2457215536E+00 0.9369295664E-01 - 0.9012345902E+00 0.3978180491E+00 -0.1718051609E+00 - 0.8668045177E+00 0.3600660166E+00 -0.3449672329E+00 - 0.7636176477E+00 0.4803529024E+00 -0.4314500867E+00 - 0.7019263040E+00 0.6111880296E+00 -0.3657166338E+00 - 0.7407816983E+00 0.6450205727E+00 -0.1875924741E+00 - 0.8461042293E+00 0.5247182702E+00 -0.9369295664E-01 - 0.7407816983E+00 0.6450205727E+00 -0.1875924741E+00 - 0.6361095616E+00 0.7658891927E+00 -0.9369295664E-01 - 0.6435033919E+00 0.7626794715E+00 0.6498775489E-01 - 0.7442772740E+00 0.6480642740E+00 0.1614436008E+00 - 0.8439420100E+00 0.5324833101E+00 0.6498775489E-01 - 0.8461042293E+00 0.5247182702E+00 -0.9369295664E-01 - 0.9242047653E+00 -0.3763271762E+00 0.6498775488E-01 - 0.8463400769E+00 -0.5075868772E+00 0.1614436008E+00 - 0.7672141452E+00 -0.6380901559E+00 0.6498775488E-01 - 0.7604973159E+00 -0.6425460780E+00 -0.9369295664E-01 - 0.8423651527E+00 -0.5052029426E+00 -0.1875924741E+00 - 0.9249725723E+00 -0.3683033672E+00 -0.9369295664E-01 - 0.8423651527E+00 -0.5052029426E+00 -0.1875924741E+00 - 0.7981815150E+00 -0.4787040975E+00 -0.3657166338E+00 - 0.8359870647E+00 -0.3390817753E+00 -0.4314500867E+00 - 0.9164945828E+00 -0.2025716858E+00 -0.3449672329E+00 - 0.9570083487E+00 -0.2337049578E+00 -0.1718051609E+00 - 0.9249725723E+00 -0.3683033672E+00 -0.9369295664E-01 - 0.9271176189E+00 0.8187957304E-01 0.3657166338E+00 - 0.9001240061E+00 -0.6022953042E-01 0.4314500867E+00 - 0.9129010801E+00 -0.2181953856E+00 0.3449672329E+00 - 0.9629456817E+00 -0.2078908386E+00 0.1718051609E+00 - 0.9929343613E+00 -0.7282159007E-01 0.9369295664E-01 - 0.9784385631E+00 0.8641204758E-01 0.1875924741E+00 - 0.6012080497E+00 -0.5228665124E+00 -0.6042842801E+00 - 0.4529124277E+00 -0.6187211338E+00 -0.6419147073E+00 - 0.3425698233E+00 -0.5457084767E+00 -0.7647536693E+00 - 0.3665122481E+00 -0.4062561330E+00 -0.8370332887E+00 - 0.5206102953E+00 -0.3122325898E+00 -0.7946544723E+00 - 0.6374812308E+00 -0.3823251622E+00 -0.6689134105E+00 - 0.5206102953E+00 -0.3122325898E+00 -0.7946544723E+00 - 0.5309875045E+00 -0.1320134222E+00 -0.8370332887E+00 - 0.6427339198E+00 -0.4522094255E-01 -0.7647536693E+00 - 0.7591065523E+00 -0.1081792529E+00 -0.6419147073E+00 - 0.7443815262E+00 -0.2841419580E+00 -0.6042842801E+00 - 0.6374812308E+00 -0.3823251622E+00 -0.6689134105E+00 - 0.7604973159E+00 -0.6425460780E+00 -0.9369295664E-01 - 0.6568442522E+00 -0.7341924919E+00 -0.1718051609E+00 - 0.6103004582E+00 -0.7131135667E+00 -0.3449672329E+00 - 0.6928135883E+00 -0.5778063297E+00 -0.4314500867E+00 - 0.7981815150E+00 -0.4787040975E+00 -0.3657166338E+00 - 0.8423651527E+00 -0.5052029426E+00 -0.1875924741E+00 - 0.2896363881E+00 0.4642047353E+00 -0.8370332887E+00 - 0.4578282678E+00 0.3986446373E+00 -0.7946544723E+00 - 0.4996310559E+00 
0.2230338127E+00 -0.8370332887E+00 - 0.3889846814E+00 0.1363384845E+00 -0.9110997395E+00 - 0.2239490336E+00 0.1949990588E+00 -0.9548938147E+00 - 0.1885460633E+00 0.3665346460E+00 -0.9110997395E+00 - 0.4578282678E+00 0.3986446373E+00 -0.7946544723E+00 - 0.5606053708E+00 0.4881357060E+00 -0.6689134105E+00 - 0.6830591083E+00 0.4102081952E+00 -0.6042842801E+00 - 0.7283964032E+00 0.2395499705E+00 -0.6419147073E+00 - 0.6248594999E+00 0.1571700695E+00 -0.7647536693E+00 - 0.4996310559E+00 0.2230338127E+00 -0.8370332887E+00 - 0.5206102953E+00 -0.3122325898E+00 -0.7946544723E+00 - 0.3665122481E+00 -0.4062561330E+00 -0.8370332887E+00 - 0.2498684812E+00 -0.3278155072E+00 -0.9110997395E+00 - 0.2546591828E+00 -0.1527301647E+00 -0.9548938147E+00 - 0.4068591013E+00 -0.6605252750E-01 -0.9110997395E+00 - 0.5309875045E+00 -0.1320134222E+00 -0.8370332887E+00 - 0.7019263040E+00 0.6111880296E+00 -0.3657166338E+00 - 0.5808201421E+00 0.6902889144E+00 -0.4314500867E+00 - 0.4758695232E+00 0.8090400517E+00 -0.3449672329E+00 - 0.5179984665E+00 0.8379502225E+00 -0.1718051609E+00 - 0.6361095616E+00 0.7658891927E+00 -0.9369295664E-01 - 0.7407816983E+00 0.6450205727E+00 -0.1875924741E+00 - 0.4578282678E+00 0.3986446373E+00 -0.7946544723E+00 - 0.2896363881E+00 0.4642047353E+00 -0.8370332887E+00 - 0.2416233762E+00 0.5973022429E+00 -0.7647536693E+00 - 0.3374614086E+00 0.6885240056E+00 -0.6419147073E+00 - 0.5002616026E+00 0.6201442072E+00 -0.6042842801E+00 - 0.5606053708E+00 0.4881357060E+00 -0.6689134105E+00 - 0.3697775916E+00 0.9268447143E+00 -0.6498775488E-01 - 0.2212103404E+00 0.9617702164E+00 -0.1614436008E+00 - 0.7231343441E-01 0.9952624573E+00 -0.6498775488E-01 - 0.6444507322E-01 0.9935131919E+00 0.9369295664E-01 - 0.2201714030E+00 0.9572531624E+00 0.1875924741E+00 - 0.3760910397E+00 0.9218335857E+00 0.9369295664E-01 - 0.2201714030E+00 0.9572531624E+00 0.1875924741E+00 - 0.2086229985E+00 0.9070434324E+00 0.3657166338E+00 - 0.3354353023E+00 0.8374568530E+00 0.4314500867E+00 - 0.4896180912E+00 0.8007944387E+00 0.3449672329E+00 - 0.4952825170E+00 0.8515739633E+00 0.1718051609E+00 - 0.3760910397E+00 0.9218335857E+00 0.9369295664E-01 - -0.9462727051E-02 0.4120773247E+00 -0.9110997395E+00 - -0.1162510682E+00 0.2732462108E+00 -0.9548938147E+00 - -0.2903312257E+00 0.2925833968E+00 -0.9110997395E+00 - -0.3519823723E+00 0.4189077264E+00 -0.8370332887E+00 - -0.2376568648E+00 0.5586085251E+00 -0.7946544723E+00 - -0.5772327375E-01 0.5440986099E+00 -0.8370332887E+00 - -0.2376568648E+00 0.5586085251E+00 -0.7946544723E+00 - -0.2910080573E+00 0.6840096197E+00 -0.6689134105E+00 - -0.1790543045E+00 0.7763891195E+00 -0.6042842801E+00 - -0.2738693219E-02 0.7667711576E+00 -0.6419147073E+00 - 0.4361458581E-01 0.6428449216E+00 -0.7647536693E+00 - -0.5772327375E-01 0.5440986099E+00 -0.8370332887E+00 - -0.2376568648E+00 0.5586085251E+00 -0.7946544723E+00 - -0.3519823723E+00 0.4189077264E+00 -0.8370332887E+00 - -0.4934024609E+00 0.4143740303E+00 -0.7647536693E+00 - -0.5505439319E+00 0.5337104904E+00 -0.6419147073E+00 - -0.4352028525E+00 0.6674121560E+00 -0.6042842801E+00 - -0.2910080573E+00 0.6840096197E+00 -0.6689134105E+00 - -0.4770204756E+00 0.7657037865E+00 -0.4314500867E+00 - -0.6223910433E+00 0.7025859360E+00 -0.3449672329E+00 - -0.6368676903E+00 0.7515866762E+00 -0.1718051609E+00 - -0.5318352427E+00 0.8416489200E+00 -0.9369295664E-01 - -0.3845368849E+00 0.9038475800E+00 -0.1875924741E+00 - -0.3643672015E+00 0.8564390733E+00 -0.3657166338E+00 - -0.3845368849E+00 0.9038475800E+00 -0.1875924741E+00 - -0.2375761442E+00 0.9668398035E+00 
-0.9369295664E-01 - -0.9985064359E-01 0.9800575675E+00 -0.1718051609E+00 - -0.7458580462E-01 0.9356466032E+00 -0.3449672329E+00 - -0.2208719276E+00 0.8746807500E+00 -0.4314500867E+00 - -0.3643672015E+00 0.8564390733E+00 -0.3657166338E+00 - -0.5318352427E+00 0.8416489200E+00 -0.9369295664E-01 - -0.5264977972E+00 0.8476890121E+00 0.6498775489E-01 - -0.3863514246E+00 0.9081126255E+00 0.1614436008E+00 - -0.2456292985E+00 0.9671829400E+00 0.6498775489E-01 - -0.2375761442E+00 0.9668398035E+00 -0.9369295664E-01 - -0.3845368849E+00 0.9038475800E+00 -0.1875924741E+00 - 0.6444507322E-01 0.9935131919E+00 0.9369295664E-01 - -0.7346522053E-01 0.9823878298E+00 0.1718051609E+00 - -0.9055527954E-01 0.9342362386E+00 0.3449672329E+00 - 0.6415172192E-01 0.8998529765E+00 0.4314500867E+00 - 0.2086229985E+00 0.9070434324E+00 0.3657166338E+00 - 0.2201714030E+00 0.9572531624E+00 0.1875924741E+00 - -0.3114920992E+00 -0.7333574715E+00 -0.6042842801E+00 - -0.4484811290E+00 -0.6219406608E+00 -0.6419147073E+00 - -0.4131397056E+00 -0.4944364560E+00 -0.7647536693E+00 - -0.2731140292E+00 -0.4741139110E+00 -0.8370332887E+00 - -0.1360734104E+00 -0.5916149902E+00 -0.7946544723E+00 - -0.1666203030E+00 -0.7244256510E+00 -0.6689134105E+00 - -0.1360734104E+00 -0.5916149902E+00 -0.7946544723E+00 - 0.3853193725E-01 -0.5457935172E+00 -0.8370332887E+00 - 0.1556080320E+00 -0.6252503224E+00 -0.7647536693E+00 - 0.1316922418E+00 -0.7553824608E+00 -0.6419147073E+00 - -0.4020851882E-01 -0.7957535949E+00 -0.6042842801E+00 - -0.1666203030E+00 -0.7244256510E+00 -0.6689134105E+00 - -0.2731140292E+00 -0.4741139110E+00 -0.8370332887E+00 - -0.2345574672E+00 -0.3389396101E+00 -0.9110997395E+00 - -0.6656100312E-01 -0.2893914916E+00 -0.9548938147E+00 - 0.6290668992E-01 -0.4073573530E+00 -0.9110997395E+00 - 0.3853193725E-01 -0.5457935172E+00 -0.8370332887E+00 - -0.1360734104E+00 -0.5916149902E+00 -0.7946544723E+00 - -0.3948329683E+00 0.1183393081E+00 -0.9110997395E+00 - -0.2957961450E+00 -0.2612361319E-01 -0.9548938147E+00 - -0.3679806288E+00 -0.1857081622E+00 -0.9110997395E+00 - -0.5071734576E+00 -0.2053055222E+00 -0.8370332887E+00 - -0.6047082879E+00 -0.5340558244E-01 -0.7946544723E+00 - -0.5353060010E+00 0.1132376214E+00 -0.8370332887E+00 - -0.6047082879E+00 -0.5340558244E-01 -0.7946544723E+00 - -0.7404582412E+00 -0.6539451243E-01 -0.6689134105E+00 - -0.7937207543E+00 0.6962666910E-01 -0.6042842801E+00 - -0.7300890087E+00 0.2343406665E+00 -0.6419147073E+00 - -0.5979042035E+00 0.2401299416E+00 -0.7647536693E+00 - -0.5353060010E+00 0.1132376214E+00 -0.8370332887E+00 - -0.5071734576E+00 -0.2053055222E+00 -0.8370332887E+00 - -0.5465628671E+00 -0.3412050082E+00 -0.7647536693E+00 - -0.6777162709E+00 -0.3586727823E+00 -0.6419147073E+00 - -0.7692317574E+00 -0.2076608103E+00 -0.6042842801E+00 - -0.7404582412E+00 -0.6539451243E-01 -0.6689134105E+00 - -0.6047082879E+00 -0.5340558244E-01 -0.7946544723E+00 diff --git a/grid_gen/global_scvt/dx/voronoi.dx b/grid_gen/global_scvt/dx/voronoi.dx deleted file mode 100644 index 7b8186c89..000000000 --- a/grid_gen/global_scvt/dx/voronoi.dx +++ /dev/null @@ -1,25 +0,0 @@ -object "positions list" class array type float rank 1 shape 3 items 960 -ascii data file vor.position.data - -object "edge list" class array type int rank 0 items 960 -ascii data file vor.edge.data -attribute "ref" string "positions" - -object "loops list" class array type int rank 0 items 162 -ascii data file vor.loop.data -attribute "ref" string "edges" - -object "face list" class array type int rank 0 items 162 -ascii data file vor.face.data 
-attribute "ref" string "loops" - -object 0 class array type float rank 0 items 162 -data file vor.area.data -attribute "dep" string "faces" - -object "area" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 0 diff --git a/grid_gen/global_scvt/locs.dat b/grid_gen/global_scvt/locs.dat deleted file mode 100644 index 90f85cf58..000000000 --- a/grid_gen/global_scvt/locs.dat +++ /dev/null @@ -1,163 +0,0 @@ - 162 162 - 1 -0.8909593038 -0.0786862054 0.4472135955 - 2 -0.2004865378 -0.8716680264 0.4472135955 - 3 0.0000000000 0.0000000000 1.0000000000 - 4 0.8909593038 0.0786862054 -0.4472135955 - 5 0.2004865378 0.8716680264 -0.4472135955 - 6 0.3501565946 -0.8230372770 -0.4472135955 - 7 0.7670518092 -0.4600342618 0.4472135955 - 8 -0.7670518092 0.4600342618 -0.4472135955 - 9 0.0000000000 0.0000000000 -1.0000000000 - 10 0.6745506270 0.5873512166 0.4472135955 - 11 -0.6745506270 -0.5873512166 -0.4472135955 - 12 -0.3501565946 0.8230372770 0.4472135955 - 13 -0.7295096215 0.4375185824 0.5257311121 - 14 -0.9745544804 0.2241507635 0.0000000000 - 15 -0.9201836497 -0.3914869741 0.0000000000 - 16 -0.6415357694 -0.5586042019 0.5257311121 - 17 -0.5236927392 -0.0462505911 0.8506508084 - 18 -0.5143339407 -0.8575899938 0.0000000000 - 19 0.0879738521 -0.9961227843 0.0000000000 - 20 0.3330187110 -0.7827549654 0.5257311121 - 21 -0.1178430302 -0.5123536108 0.8506508084 - 22 0.3964909105 0.3452363830 0.8506508084 - 23 -0.2058168823 0.4837691735 0.8506508084 - 24 0.4508617412 -0.2704013546 0.8506508084 - 25 0.9201836497 0.3914869741 0.0000000000 - 26 0.9745544804 -0.2241507635 0.0000000000 - 27 0.7295096215 -0.4375185824 -0.5257311121 - 28 0.5236927392 0.0462505911 -0.8506508084 - 29 0.6415357694 0.5586042019 -0.5257311121 - 30 0.5143339407 0.8575899938 0.0000000000 - 31 0.1178430302 0.5123536108 -0.8506508084 - 32 -0.3330187110 0.7827549654 -0.5257311121 - 33 -0.0879738521 0.9961227843 0.0000000000 - 34 -0.1906740282 -0.8290055565 -0.5257311121 - 35 0.2058168823 -0.4837691735 -0.8506508084 - 36 0.6566786235 -0.7541705281 0.0000000000 - 37 0.8473526517 0.0748350284 0.5257311121 - 38 -0.4508617412 0.2704013546 -0.8506508084 - 39 -0.8473526517 -0.0748350284 -0.5257311121 - 40 -0.6566786235 0.7541705281 0.0000000000 - 41 -0.3964909105 -0.3452363830 -0.8506508084 - 42 0.1906740282 0.8290055565 0.5257311121 - 43 -0.8434663484 0.1827424714 0.5051432551 - 44 -0.9690454298 0.0733970446 0.2357198099 - 45 -0.9411818011 -0.2421011948 0.2357198099 - 46 -0.7983820501 -0.3277444034 0.5051432551 - 47 -0.7379905791 -0.0651765777 0.6716561016 - 48 -0.1660650217 -0.7220114194 0.6716561016 - 49 -0.4344438541 -0.7457136377 0.5051432551 - 50 -0.3692562437 -0.8989360365 0.2357198099 - 51 -0.0605892524 -0.9699304685 0.2357198099 - 52 0.0649898290 -0.8605850417 0.5051432551 - 53 0.2310548507 -0.1385736223 0.9630218103 - 54 0.2031912220 0.1769246171 0.9630218103 - 55 -0.1054757693 0.2479190491 0.9630218103 - 56 -0.2683788324 -0.0237022183 0.9630218103 - 57 -0.0603914710 -0.2625678257 0.9630218103 - 58 0.7983820501 0.3277444034 -0.5051432551 - 59 0.9411818011 0.2421011948 -0.2357198099 - 60 0.9690454298 -0.0733970446 -0.2357198099 - 61 0.8434663484 -0.1827424714 -0.5051432551 - 62 0.7379905791 0.0651765777 -0.6716561016 - 63 0.3692562437 0.8989360365 -0.2357198099 - 64 0.4344438541 0.7457136377 -0.5051432551 - 65 0.1660650217 0.7220114194 -0.6716561016 - 66 -0.0649898290 0.8605850417 -0.5051432551 - 67 0.0605892524 0.9699304685 
-0.2357198099 - 68 0.0868470176 -0.8586546962 -0.5051432551 - 69 0.2900382396 -0.6817300791 -0.6716561016 - 70 0.5584170720 -0.6580278608 -0.5051432551 - 71 0.5210930903 -0.8203037014 -0.2357198099 - 72 0.2296467686 -0.9442979047 -0.2357198099 - 73 0.7408325207 -0.6289700689 0.2357198099 - 74 0.9037355838 -0.3573488014 0.2357198099 - 75 0.8385479733 -0.2041264026 0.5051432551 - 76 0.6353567514 -0.3810510197 0.6716561016 - 77 0.5749652804 -0.6436188454 0.5051432551 - 78 -0.5749652804 0.6436188454 -0.5051432551 - 79 -0.6353567514 0.3810510197 -0.6716561016 - 80 -0.8385479733 0.2041264026 -0.5051432551 - 81 -0.9037355838 0.3573488014 -0.2357198099 - 82 -0.7408325207 0.6289700689 -0.2357198099 - 83 0.0603914710 0.2625678257 -0.9630218103 - 84 0.2683788324 0.0237022183 -0.9630218103 - 85 0.1054757693 -0.2479190491 -0.9630218103 - 86 -0.2031912220 -0.1769246171 -0.9630218103 - 87 -0.2310548507 0.1385736223 -0.9630218103 - 88 0.4532613197 0.7344279869 0.5051432551 - 89 0.5587370890 0.4865089377 0.6716561016 - 90 0.7897919397 0.3479353154 0.5051432551 - 91 0.8271159214 0.5102111560 0.2357198099 - 92 0.6191285600 0.7490767634 0.2357198099 - 93 -0.6191285600 -0.7490767634 -0.2357198099 - 94 -0.8271159214 -0.5102111560 -0.2357198099 - 95 -0.7897919397 -0.3479353154 -0.5051432551 - 96 -0.5587370890 -0.4865089377 -0.6716561016 - 97 -0.4532613197 -0.7344279869 -0.5051432551 - 98 -0.2296467686 0.9442979047 0.2357198099 - 99 -0.5210930903 0.8203037014 0.2357198099 - 100 -0.5584170720 0.6580278608 0.5051432551 - 101 -0.2900382396 0.6817300791 0.6716561016 - 102 -0.0868470176 0.8586546962 0.5051432551 - 103 -0.7389477008 0.6174931387 0.2695524424 - 104 -0.8927233180 0.3610907895 0.2695524424 - 105 -0.6485012287 0.2146202028 0.7303316540 - 106 -0.4947256115 0.4710225519 0.7303316540 - 107 -0.9607746082 0.0652198824 -0.2695524424 - 108 -0.9960073301 -0.0879636556 -0.0152247717 - 109 -0.8574905961 0.5142743275 0.0152247717 - 110 -0.7540831171 -0.6566025121 0.0152247717 - 111 -0.8156182708 -0.5119651523 0.2695524424 - 112 -0.9344721764 -0.2326010154 -0.2695524424 - 113 -0.4045138430 -0.5504400294 0.7303316540 - 114 -0.6008476888 -0.3249580433 0.7303316540 - 115 -0.6192844249 -0.7374471383 0.2695524424 - 116 -0.3951596413 0.2369943877 0.8875147988 - 117 -0.3475061014 -0.3025838584 0.8875147988 - 118 -0.3589234758 -0.8935968998 -0.2695524424 - 119 -0.2241247837 -0.9744415260 -0.0152247717 - 120 -0.0675510718 -0.9606135193 -0.2695524424 - 121 0.3914415994 -0.9200769971 0.0152247717 - 122 0.2348678876 -0.9339050038 0.2695524424 - 123 0.5099844947 -0.8168581860 0.2695524424 - 124 0.3984979249 -0.5548108497 0.7303316540 - 125 0.1233813177 -0.6718576676 0.7303316540 - 126 0.1803890593 -0.4240014966 0.8875147988 - 127 0.4589926712 0.0405365222 0.8875147988 - 128 0.6507991050 0.2075480669 0.7303316540 - 129 0.2950904458 0.6160659899 0.7303316540 - 130 0.1032840121 0.4490544452 0.8875147988 - 131 0.0037180419 0.6830826094 0.7303316540 - 132 0.6771015368 -0.0902728309 0.7303316540 - 133 0.9344721764 0.2326010154 0.2695524424 - 134 0.9960073301 0.0879636556 0.0152247717 - 135 0.8156182708 0.5119651523 -0.2695524424 - 136 0.7540831171 0.6566025121 -0.0152247717 - 137 0.8574905961 -0.5142743275 -0.0152247717 - 138 0.8927233180 -0.3610907895 -0.2695524424 - 139 0.9607746082 -0.0652198824 0.2695524424 - 140 0.4947256115 -0.4710225519 -0.7303316540 - 141 0.6485012287 -0.2146202028 -0.7303316540 - 142 0.7389477008 -0.6174931387 -0.2695524424 - 143 0.3475061014 0.3025838584 -0.8875147988 - 144 0.6008476888 0.3249580433 
-0.7303316540 - 145 0.3951596413 -0.2369943877 -0.8875147988 - 146 0.6192844249 0.7374471383 -0.2695524424 - 147 0.4045138430 0.5504400294 -0.7303316540 - 148 0.2241247837 0.9744415260 0.0152247717 - 149 0.3589234758 0.8935968998 0.2695524424 - 150 -0.1803890593 0.4240014966 -0.8875147988 - 151 -0.1233813177 0.6718576676 -0.7303316540 - 152 -0.3984979249 0.5548108497 -0.7303316540 - 153 -0.5099844947 0.8168581860 -0.2695524424 - 154 -0.2348678876 0.9339050038 -0.2695524424 - 155 -0.3914415994 0.9200769971 -0.0152247717 - 156 0.0675510718 0.9606135193 0.2695524424 - 157 -0.2950904458 -0.6160659899 -0.7303316540 - 158 -0.0037180419 -0.6830826094 -0.7303316540 - 159 -0.1032840121 -0.4490544452 -0.8875147988 - 160 -0.4589926712 -0.0405365222 -0.8875147988 - 161 -0.6771015368 0.0902728309 -0.7303316540 - 162 -0.6507991050 -0.2075480669 -0.7303316540 diff --git a/grid_gen/global_scvt/namelist.input b/grid_gen/global_scvt/namelist.input deleted file mode 100644 index 12b2e45ad..000000000 --- a/grid_gen/global_scvt/namelist.input +++ /dev/null @@ -1,9 +0,0 @@ -&domains - np = 162 - locs_as_xyz = .true. - n_scvt_iterations = 100 - eps = 0.000000001 - l2_conv = .true. - inf_conv = .false. - min_dx = 120000.0 -/ diff --git a/grid_gen/global_scvt/refine/grid_ref.f b/grid_gen/global_scvt/refine/grid_ref.f deleted file mode 100644 index 11851adff..000000000 --- a/grid_gen/global_scvt/refine/grid_ref.f +++ /dev/null @@ -1,58 +0,0 @@ - program Grid_Refine - include "scvt.m" - -C The file "nodes_s.dat" must exist and read it - open(15,file='locs.dat.out',status='unknown') - read(15,*) N - do node = 1,N - read(15,200) X(node),Y(node),Z(node) - enddo - close(15) - print *,"Number of Starting Generators = ", N - -C Generate the Delaunay triangles - CALL TRMESH (N,X,Y,Z,LIST,LPTR,LEND,LNEW,IWK,IWK(N+1),DS,IER) - - DO NODE = 1,N - LPL = LEND(NODE) - LP = LPL - K = 0 -10 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NEIGH(NODE,K) = ND - IF (LP .NE. LPL) GOTO 10 - NEISZ(NODE) = K - ENDDO - - K=N+1 - -C Add points into the middle of edges - DO I = 1,N - DO J = 1,NEISZ(I) - IF (NEIGH(I,J).GT.I) then - X(K) = 0.5*(X(I)+X(NEIGH(I,J))) - Y(K) = 0.5*(Y(I)+Y(NEIGH(I,J))) - Z(K) = 0.5*(Z(I)+Z(NEIGH(I,J))) - R = sqrt(X(K)**2+Y(K)**2+Z(K)**2) - X(K) = X(K)/R - Y(K) = Y(K)/R - Z(K) = Z(K)/R - K=K+1 - ENDIF - ENDDO - ENDDO - NG=K-1 - print *,"Number of Final Generators = ",NG - - open(16,file='locs.dat.out.refined',status='unknown') - write(16,100) NG - do node = 1,NG - write(16,200) X(node),Y(node),Z(node) - enddo - close(16) -100 format(I10) -!200 format(I10,3X,F16.10,3X,F16.10,3X,F16.10) -200 format(10x,f22.10,f23.10,f23.10) - - end program Grid_Refine diff --git a/grid_gen/global_scvt/refine/scvt.m b/grid_gen/global_scvt/refine/scvt.m deleted file mode 100644 index 5bb2fe462..000000000 --- a/grid_gen/global_scvt/refine/scvt.m +++ /dev/null @@ -1,36 +0,0 @@ - IMPLICIT REAL(A-H,O-Z) - INTEGER IER, IFLAG, K, KSUM, KT, LIN, LOUT, LP, LPL, - . LPLT, LPLV, LW, LWK, LNEW, N, N0, N1, N2, N3, - . NA, NB, NCOL, NMAX, NN, NROW, NT, NT6, NTMX, NV - INTEGER NEARND - LOGICAL INSIDE, NUMBR - REAL A, AL, AREA, DEL, ELAT, ELON, P(3), PLTSIZ, SC - . VLAT, VLON, VNRM - REAL V1(3), V2(3), V3(3), V4(3), V5(3), V6(3) - REAL SNW(3), ANGLES(3) - - PARAMETER (NMAX=1000000, NTMX=2*NMAX, NT6=6*NMAX, LWK=2*NMAX, - . NCOL=NMAX, NROW=16) - -C Array storage for the triangulation, work space, and nodal -C coordinates. - - INTEGER LIST(NT6), LPTR(NT6), LEND(NMAX), IWK(LWK), - . NEIGH(NMAX,20), NEISZ(NMAX), NTRI(6*NMAX,3), - . 
MARK(NMAX) - REAL DS(NMAX), RLAT(NMAX), RLON(NMAX), HV(NMAX), AV(NMAX) - REAL X(NMAX), Y(NMAX), Z(NMAX), X1(NMAX), Y1(NMAX), Z1(NMAX) - -C Array storage for the Voronoi diagram: adjacency array, -C boundary triangle list, triangle circumcenters, and -C circumradii. - - INTEGER LISTC(NT6), LBTRI(6,NCOL) - INTEGER VORTX(NMAX,20), VORSZ(NMAX) - REAL XC(NTMX), YC(NTMX), ZC(NTMX), RC(NTMX) - -C Array storage for the triangle list. - - INTEGER LTRI(NROW,NTMX) - INTEGER I1MACH(2) - diff --git a/grid_gen/global_scvt/refine/svtgen.f b/grid_gen/global_scvt/refine/svtgen.f deleted file mode 100644 index 3fda62cee..000000000 --- a/grid_gen/global_scvt/refine/svtgen.f +++ /dev/null @@ -1,6729 +0,0 @@ - SUBROUTINE ADDNOD (NST,K,X,Y,Z, LIST,LPTR,LEND, - . LNEW, IER) - INTEGER NST, K, LIST(*), LPTR(*), LEND(K), LNEW, IER - REAL X(K), Y(K), Z(K) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/08/99 -C -C This subroutine adds node K to a triangulation of the -C convex hull of nodes 1,...,K-1, producing a triangulation -C of the convex hull of nodes 1,...,K. -C -C The algorithm consists of the following steps: node K -C is located relative to the triangulation (TRFIND), its -C index is added to the data structure (INTADD or BDYADD), -C and a sequence of swaps (SWPTST and SWAP) are applied to -C the arcs opposite K so that all arcs incident on node K -C and opposite node K are locally optimal (satisfy the cir- -C cumcircle test). Thus, if a Delaunay triangulation is -C input, a Delaunay triangulation will result. -C -C -C On input: -C -C NST = Index of a node at which TRFIND begins its -C search. Search time depends on the proximity -C of this node to K. If NST < 1, the search is -C begun at node K-1. -C -C K = Nodal index (index for X, Y, Z, and LEND) of the -C new node to be added. K .GE. 4. -C -C X,Y,Z = Arrays of length .GE. K containing Car- -C tesian coordinates of the nodes. -C (X(I),Y(I),Z(I)) defines node I for -C I = 1,...,K. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure associated with -C the triangulation of nodes 1 -C to K-1. The array lengths are -C assumed to be large enough to -C add node K. Refer to Subrou- -C tine TRMESH. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node K as the -C last entry unless IER .NE. 0 -C and IER .NE. -3, in which case -C the arrays are not altered. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = -1 if K is outside its valid range -C on input. -C IER = -2 if all nodes (including K) are col- -C linear (lie on a common geodesic). -C IER = L if nodes L and K coincide for some -C L < K. -C -C Modules required by ADDNOD: BDYADD, COVSPH, INSERT, -C INTADD, JRAND, LSTPTR, -C STORE, SWAP, SWPTST, -C TRFIND -C -C Intrinsic function called by ADDNOD: ABS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER I1, I2, I3, IO1, IO2, IN1, IST, KK, KM1, L, - . LP, LPF, LPO1, LPO1S - LOGICAL SWPTST - REAL B1, B2, B3, P(3) -C -C Local parameters: -C -C B1,B2,B3 = Unnormalized barycentric coordinates returned -C by TRFIND. -C I1,I2,I3 = Vertex indexes of a triangle containing K -C IN1 = Vertex opposite K: first neighbor of IO2 -C that precedes IO1. IN1,IO1,IO2 are in -C counterclockwise order. 
-C IO1,IO2 = Adjacent neighbors of K defining an arc to -C be tested for a swap -C IST = Index of node at which TRFIND begins its search -C KK = Local copy of K -C KM1 = K-1 -C L = Vertex index (I1, I2, or I3) returned in IER -C if node K coincides with a vertex -C LP = LIST pointer -C LPF = LIST pointer to the first neighbor of K -C LPO1 = LIST pointer to IO1 -C LPO1S = Saved value of LPO1 -C P = Cartesian coordinates of node K -C - KK = K - IF (KK .LT. 4) GO TO 3 -C -C Initialization: -C - KM1 = KK - 1 - IST = NST - IF (IST .LT. 1) IST = KM1 - P(1) = X(KK) - P(2) = Y(KK) - P(3) = Z(KK) -C -C Find a triangle (I1,I2,I3) containing K or the rightmost -C (I1) and leftmost (I2) visible boundary nodes as viewed -C from node K. -C - CALL TRFIND (IST,P,KM1,X,Y,Z,LIST,LPTR,LEND, B1,B2,B3, - . I1,I2,I3) -C -C Test for collinear or duplicate nodes. -C - IF (I1 .EQ. 0) GO TO 4 - IF (I3 .NE. 0) THEN - L = I1 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - L = I2 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - L = I3 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - CALL INTADD (KK,I1,I2,I3, LIST,LPTR,LEND,LNEW ) - ELSE - IF (I1 .NE. I2) THEN - CALL BDYADD (KK,I1,I2, LIST,LPTR,LEND,LNEW ) - ELSE - CALL COVSPH (KK,I1, LIST,LPTR,LEND,LNEW ) - ENDIF - ENDIF - IER = 0 -C -C Initialize variables for optimization of the -C triangulation. -C - LP = LEND(KK) - LPF = LPTR(LP) - IO2 = LIST(LPF) - LPO1 = LPTR(LPF) - IO1 = ABS(LIST(LPO1)) -C -C Begin loop: find the node opposite K. -C - 1 LP = LSTPTR(LEND(IO1),IO2,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 2 - LP = LPTR(LP) - IN1 = ABS(LIST(LP)) -C -C Swap test: if a swap occurs, two new arcs are -C opposite K and must be tested. -C - LPO1S = LPO1 - IF ( .NOT. SWPTST(IN1,KK,IO1,IO2,X,Y,Z) ) GO TO 2 - CALL SWAP (IN1,KK,IO1,IO2, LIST,LPTR,LEND, LPO1) - IF (LPO1 .EQ. 0) THEN -C -C A swap is not possible because KK and IN1 are already -C adjacent. This error in SWPTST only occurs in the -C neutral case and when there are nearly duplicate -C nodes. -C - LPO1 = LPO1S - GO TO 2 - ENDIF - IO1 = IN1 - GO TO 1 -C -C No swap occurred. Test for termination and reset -C IO2 and IO1. -C - 2 IF (LPO1 .EQ. LPF .OR. LIST(LPO1) .LT. 0) RETURN - IO2 = IO1 - LPO1 = LPTR(LPO1) - IO1 = ABS(LIST(LPO1)) - GO TO 1 -C -C KK < 4. -C - 3 IER = -1 - RETURN -C -C All nodes are collinear. -C - 4 IER = -2 - RETURN -C -C Nodes L and K coincide. -C - 5 IER = L - RETURN - END - REAL FUNCTION AREAS (V1,V2,V3) - REAL V1(3), V2(3), V3(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 09/18/90 -C -C This function returns the area of a spherical triangle -C on the unit sphere. -C -C -C On input: -C -C V1,V2,V3 = Arrays of length 3 containing the Carte- -C sian coordinates of unit vectors (the -C three triangle vertices in any order). -C These vectors, if nonzero, are implicitly -C scaled to have length 1. -C -C Input parameters are not altered by this function. -C -C On output: -C -C AREAS = Area of the spherical triangle defined by -C V1, V2, and V3 in the range 0 to 2*PI (the -C area of a hemisphere). AREAS = 0 (or 2*PI) -C if and only if V1, V2, and V3 lie in (or -C close to) a plane containing the origin. 
-C -C Modules required by AREAS: None -C -C Intrinsic functions called by AREAS: ACOS, DBLE, REAL, -C SQRT -C -C*********************************************************** -C - DOUBLE PRECISION A1, A2, A3, CA1, CA2, CA3, DV1(3), - . DV2(3), DV3(3), S12, S23, S31, - . U12(3), U23(3), U31(3) - INTEGER I -C -C Local parameters: -C -C A1,A2,A3 = Interior angles of the spherical triangle -C CA1,CA2,CA3 = cos(A1), cos(A2), and cos(A3), respectively -C DV1,DV2,DV3 = Double Precision copies of V1, V2, and V3 -C I = DO-loop index and index for Uij -C S12,S23,S31 = Sum of squared components of U12, U23, U31 -C U12,U23,U31 = Unit normal vectors to the planes defined by -C pairs of triangle vertices -C - DO 1 I = 1,3 - DV1(I) = DBLE(V1(I)) - DV2(I) = DBLE(V2(I)) - DV3(I) = DBLE(V3(I)) - 1 CONTINUE -C -C Compute cross products Uij = Vi X Vj. -C - U12(1) = DV1(2)*DV2(3) - DV1(3)*DV2(2) - U12(2) = DV1(3)*DV2(1) - DV1(1)*DV2(3) - U12(3) = DV1(1)*DV2(2) - DV1(2)*DV2(1) -C - U23(1) = DV2(2)*DV3(3) - DV2(3)*DV3(2) - U23(2) = DV2(3)*DV3(1) - DV2(1)*DV3(3) - U23(3) = DV2(1)*DV3(2) - DV2(2)*DV3(1) -C - U31(1) = DV3(2)*DV1(3) - DV3(3)*DV1(2) - U31(2) = DV3(3)*DV1(1) - DV3(1)*DV1(3) - U31(3) = DV3(1)*DV1(2) - DV3(2)*DV1(1) -C -C Normalize Uij to unit vectors. -C - S12 = 0.D0 - S23 = 0.D0 - S31 = 0.D0 - DO 2 I = 1,3 - S12 = S12 + U12(I)*U12(I) - S23 = S23 + U23(I)*U23(I) - S31 = S31 + U31(I)*U31(I) - 2 CONTINUE -C -C Test for a degenerate triangle associated with collinear -C vertices. -C - IF (S12 .EQ. 0.D0 .OR. S23 .EQ. 0.D0 .OR. - . S31 .EQ. 0.D0) THEN - AREAS = 0. - RETURN - ENDIF - S12 = SQRT(S12) - S23 = SQRT(S23) - S31 = SQRT(S31) - DO 3 I = 1,3 - U12(I) = U12(I)/S12 - U23(I) = U23(I)/S23 - U31(I) = U31(I)/S31 - 3 CONTINUE -C -C Compute interior angles Ai as the dihedral angles between -C planes: -C CA1 = cos(A1) = - -C CA2 = cos(A2) = - -C CA3 = cos(A3) = - -C - CA1 = -U12(1)*U31(1)-U12(2)*U31(2)-U12(3)*U31(3) - CA2 = -U23(1)*U12(1)-U23(2)*U12(2)-U23(3)*U12(3) - CA3 = -U31(1)*U23(1)-U31(2)*U23(2)-U31(3)*U23(3) - IF (CA1 .LT. -1.D0) CA1 = -1.D0 - IF (CA1 .GT. 1.D0) CA1 = 1.D0 - IF (CA2 .LT. -1.D0) CA2 = -1.D0 - IF (CA2 .GT. 1.D0) CA2 = 1.D0 - IF (CA3 .LT. -1.D0) CA3 = -1.D0 - IF (CA3 .GT. 1.D0) CA3 = 1.D0 - A1 = ACOS(CA1) - A2 = ACOS(CA2) - A3 = ACOS(CA3) -C -C Compute AREAS = A1 + A2 + A3 - PI. -C - AREAS = REAL(A1 + A2 + A3 - ACOS(-1.D0)) - IF (AREAS .LT. 0.) AREAS = 0. - RETURN - END - SUBROUTINE BDYADD (KK,I1,I2, LIST,LPTR,LEND,LNEW ) - INTEGER KK, I1, I2, LIST(*), LPTR(*), LEND(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/11/96 -C -C This subroutine adds a boundary node to a triangulation -C of a set of KK-1 points on the unit sphere. The data -C structure is updated with the insertion of node KK, but no -C optimization is performed. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C KK = Index of a node to be connected to the sequence -C of all visible boundary nodes. KK .GE. 1 and -C KK must not be equal to I1 or I2. -C -C I1 = First (rightmost as viewed from KK) boundary -C node in the triangulation that is visible from -C node KK (the line segment KK-I1 intersects no -C arcs. -C -C I2 = Last (leftmost) boundary node that is visible -C from node KK. I1 and I2 may be determined by -C Subroutine TRFIND. -C -C The above parameters are not altered by this routine. 
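The AREAS function above computes the area of a spherical triangle as its spherical excess A1 + A2 + A3 - pi, where the interior angles are the dihedral angles between the planes through pairs of vertices. The NumPy sketch below is illustrative only (the function name is ours, not part of MPAS-Tools) and follows the same steps, including the clipping of the cosines that guards ACOS against roundoff.

    import numpy as np

    def spherical_triangle_area(v1, v2, v3):
        """Area of the spherical triangle with unit-vector vertices v1, v2, v3."""
        u12, u23, u31 = np.cross(v1, v2), np.cross(v2, v3), np.cross(v3, v1)
        norms = [np.linalg.norm(u) for u in (u12, u23, u31)]
        if min(norms) == 0.0:
            return 0.0                       # degenerate (collinear) vertices
        u12, u23, u31 = u12 / norms[0], u23 / norms[1], u31 / norms[2]
        # cosines of the interior (dihedral) angles, clipped for roundoff
        ca = np.clip([-np.dot(u12, u31), -np.dot(u23, u12), -np.dot(u31, u23)],
                     -1.0, 1.0)
        return max(float(np.sum(np.arccos(ca)) - np.pi), 0.0)

For example, the triangle with vertices on the x, y, and z axes covers one octant of the sphere, and spherical_triangle_area([1, 0, 0], [0, 1, 0], [0, 0, 1]) returns pi/2 (about 1.5708).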
-C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C Nodes I1 and I2 must be in- -C cluded in the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK. Node -C KK is connected to I1, I2, and -C all boundary nodes in between. -C -C Module required by BDYADD: INSERT -C -C*********************************************************** -C - INTEGER K, LP, LSAV, N1, N2, NEXT, NSAV -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C LSAV = LIST pointer -C N1,N2 = Local copies of I1 and I2, respectively -C NEXT = Boundary node visible from K -C NSAV = Boundary node visible from K -C - K = KK - N1 = I1 - N2 = I2 -C -C Add K as the last neighbor of N1. -C - LP = LEND(N1) - LSAV = LPTR(LP) - LPTR(LP) = LNEW - LIST(LNEW) = -K - LPTR(LNEW) = LSAV - LEND(N1) = LNEW - LNEW = LNEW + 1 - NEXT = -LIST(LP) - LIST(LP) = NEXT - NSAV = NEXT -C -C Loop on the remaining boundary nodes between N1 and N2, -C adding K as the first neighbor. -C - 1 LP = LEND(NEXT) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - IF (NEXT .EQ. N2) GO TO 2 - NEXT = -LIST(LP) - LIST(LP) = NEXT - GO TO 1 -C -C Add the boundary nodes between N1 and N2 as neighbors -C of node K. -C - 2 LSAV = LNEW - LIST(LNEW) = N1 - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - NEXT = NSAV -C - 3 IF (NEXT .EQ. N2) GO TO 4 - LIST(LNEW) = NEXT - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - LP = LEND(NEXT) - NEXT = LIST(LP) - GO TO 3 -C - 4 LIST(LNEW) = -N2 - LPTR(LNEW) = LSAV - LEND(K) = LNEW - LNEW = LNEW + 1 - RETURN - END - SUBROUTINE BNODES (N,LIST,LPTR,LEND, NODES,NB,NA,NT) - INTEGER N, LIST(*), LPTR(*), LEND(N), NODES(*), NB, - . NA, NT -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/26/96 -C -C Given a triangulation of N nodes on the unit sphere -C created by Subroutine TRMESH, this subroutine returns an -C array containing the indexes (if any) of the counterclock- -C wise-ordered sequence of boundary nodes -- the nodes on -C the boundary of the convex hull of the set of nodes. (The -C boundary is empty if the nodes do not lie in a single -C hemisphere.) The numbers of boundary nodes, arcs, and -C triangles are also returned. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C The above parameters are not altered by this routine. -C -C NODES = Integer array of length at least NB -C (NB .LE. N). -C -C On output: -C -C NODES = Ordered sequence of boundary node indexes -C in the range 1 to N (in the first NB loca- -C tions). -C -C NB = Number of boundary nodes. -C -C NA,NT = Number of arcs and triangles, respectively, -C in the triangulation. -C -C Modules required by BNODES: None -C -C*********************************************************** -C - INTEGER K, LP, N0, NN, NST -C -C Local parameters: -C -C K = NODES index -C LP = LIST pointer -C N0 = Boundary node to be added to NODES -C NN = Local copy of N -C NST = First element of nodes (arbitrarily chosen to be -C the one with smallest index) -C - NN = N -C -C Search for a boundary node. -C - DO 1 NST = 1,NN - LP = LEND(NST) - IF (LIST(LP) .LT. 0) GO TO 2 - 1 CONTINUE -C -C The triangulation contains no boundary nodes. -C - NB = 0 - NA = 3*(NN-2) - NT = 2*(NN-2) - RETURN -C -C NST is the first boundary node encountered. 
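BNODES's stored counts follow from Euler's formula V - E + F = 2: with no boundary nodes the triangulation covers the sphere and has NA = 3(N-2) arcs and NT = 2(N-2) triangles (as set just above), while with NB boundary nodes NT = 2N - NB - 2 and NA = NT + N - 1 (as set just below). A tiny illustrative helper (not package code):

    def triangulation_counts(n, nb):
        """Arc and triangle counts for n nodes, nb of them on the boundary
        (nb = 0 when the triangulation covers the sphere)."""
        if nb == 0:
            nt = 2 * (n - 2)
            na = 3 * (n - 2)
        else:
            nt = 2 * n - nb - 2
            na = nt + n - 1
        return na, nt

For an icosahedral node set (n = 12, nb = 0) this gives 30 arcs and 20 triangles, matching the icosahedron's edge and face counts.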
Initialize -C for traversal of the boundary. -C - 2 NODES(1) = NST - K = 1 - N0 = NST -C -C Traverse the boundary in counterclockwise order. -C - 3 LP = LEND(N0) - LP = LPTR(LP) - N0 = LIST(LP) - IF (N0 .EQ. NST) GO TO 4 - K = K + 1 - NODES(K) = N0 - GO TO 3 -C -C Store the counts. -C - 4 NB = K - NT = 2*N - NB - 2 - NA = NT + N - 1 - RETURN - END - SUBROUTINE CIRCUM (V1,V2,V3, C,IER) - INTEGER IER - REAL V1(3), V2(3), V3(3), C(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/29/95 -C -C This subroutine returns the circumcenter of a spherical -C triangle on the unit sphere: the point on the sphere sur- -C face that is equally distant from the three triangle -C vertices and lies in the same hemisphere, where distance -C is taken to be arc-length on the sphere surface. -C -C -C On input: -C -C V1,V2,V3 = Arrays of length 3 containing the Carte- -C sian coordinates of the three triangle -C vertices (unit vectors) in CCW order. -C -C The above parameters are not altered by this routine. -C -C C = Array of length 3. -C -C On output: -C -C C = Cartesian coordinates of the circumcenter unless -C IER > 0, in which case C is not defined. C = -C (V2-V1) X (V3-V1) normalized to a unit vector. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if V1, V2, and V3 lie on a common -C line: (V2-V1) X (V3-V1) = 0. -C (The vertices are not tested for validity.) -C -C Modules required by CIRCUM: None -C -C Intrinsic function called by CIRCUM: SQRT -C -C*********************************************************** -C - INTEGER I - REAL CNORM, CU(3), E1(3), E2(3) -C -C Local parameters: -C -C CNORM = Norm of CU: used to compute C -C CU = Scalar multiple of C: E1 X E2 -C E1,E2 = Edges of the underlying planar triangle: -C V2-V1 and V3-V1, respectively -C I = DO-loop index -C - DO 1 I = 1,3 - E1(I) = V2(I) - V1(I) - E2(I) = V3(I) - V1(I) - 1 CONTINUE -C -C Compute CU = E1 X E2 and CNORM**2. -C - CU(1) = E1(2)*E2(3) - E1(3)*E2(2) - CU(2) = E1(3)*E2(1) - E1(1)*E2(3) - CU(3) = E1(1)*E2(2) - E1(2)*E2(1) - CNORM = CU(1)*CU(1) + CU(2)*CU(2) + CU(3)*CU(3) -C -C The vertices lie on a common line if and only if CU is -C the zero vector. -C - IF (CNORM .NE. 0.) THEN -C -C No error: compute C. -C - CNORM = SQRT(CNORM) - DO 2 I = 1,3 - C(I) = CU(I)/CNORM - 2 CONTINUE - IER = 0 - ELSE -C -C CU = 0. -C - IER = 1 - ENDIF - RETURN - END - SUBROUTINE COVSPH (KK,N0, LIST,LPTR,LEND,LNEW ) - INTEGER KK, N0, LIST(*), LPTR(*), LEND(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine connects an exterior node KK to all -C boundary nodes of a triangulation of KK-1 points on the -C unit sphere, producing a triangulation that covers the -C sphere. The data structure is updated with the addition -C of node KK, but no optimization is performed. All boun- -C dary nodes must be visible from node KK. -C -C -C On input: -C -C KK = Index of the node to be connected to the set of -C all boundary nodes. KK .GE. 4. -C -C N0 = Index of a boundary node (in the range 1 to -C KK-1). N0 may be determined by Subroutine -C TRFIND. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. 
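The CIRCUM routine above constructs the circumcenter of a spherical triangle as the unit vector along (V2 - V1) x (V3 - V1): it is equidistant, in arc length, from the three vertices and lies in their hemisphere when they are listed in CCW order. A NumPy sketch (illustrative only; the function name is ours):

    import numpy as np

    def circumcenter(v1, v2, v3):
        """Circumcenter of the spherical triangle (v1, v2, v3) given as unit
        vectors in CCW order; raises on collinear vertices (CIRCUM's IER = 1)."""
        cu = np.cross(np.subtract(v2, v1), np.subtract(v3, v1))
        cnorm = np.linalg.norm(cu)
        if cnorm == 0.0:
            raise ValueError("vertices lie on a common great circle")
        return cu / cnorm

For the octant triangle, circumcenter([1, 0, 0], [0, 1, 0], [0, 0, 1]) returns (1, 1, 1)/sqrt(3), the point equidistant from the three axis vertices.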
-C Node N0 must be included in -C the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK as the -C last entry. The updated -C triangulation contains no -C boundary nodes. -C -C Module required by COVSPH: INSERT -C -C*********************************************************** -C - INTEGER K, LP, LSAV, NEXT, NST -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C LSAV = LIST pointer -C NEXT = Boundary node visible from K -C NST = Local copy of N0 -C - K = KK - NST = N0 -C -C Traverse the boundary in clockwise order, inserting K as -C the first neighbor of each boundary node, and converting -C the boundary node to an interior node. -C - NEXT = NST - 1 LP = LEND(NEXT) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - NEXT = -LIST(LP) - LIST(LP) = NEXT - IF (NEXT .NE. NST) GO TO 1 -C -C Traverse the boundary again, adding each node to K's -C adjacency list. -C - LSAV = LNEW - 2 LP = LEND(NEXT) - LIST(LNEW) = NEXT - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - NEXT = LIST(LP) - IF (NEXT .NE. NST) GO TO 2 -C - LPTR(LNEW-1) = LSAV - LEND(K) = LNEW - 1 - RETURN - END - SUBROUTINE CRLIST (N,NCOL,X,Y,Z,LIST,LEND, LPTR,LNEW, - . LTRI, LISTC,NB,XC,YC,ZC,RC,IER) - INTEGER N, NCOL, LIST(*), LEND(N), LPTR(*), LNEW, - . LTRI(6,NCOL), LISTC(*), NB, IER - REAL X(N), Y(N), Z(N), XC(*), YC(*), ZC(*), RC(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/05/98 -C -C Given a Delaunay triangulation of nodes on the surface -C of the unit sphere, this subroutine returns the set of -C triangle circumcenters corresponding to Voronoi vertices, -C along with the circumradii and a list of triangle indexes -C LISTC stored in one-to-one correspondence with LIST/LPTR -C entries. -C -C A triangle circumcenter is the point (unit vector) lying -C at the same angular distance from the three vertices and -C contained in the same hemisphere as the vertices. (Note -C that the negative of a circumcenter is also equidistant -C from the vertices.) If the triangulation covers the sur- -C face, the Voronoi vertices are the circumcenters of the -C triangles in the Delaunay triangulation. LPTR, LEND, and -C LNEW are not altered in this case. -C -C On the other hand, if the nodes are contained in a sin- -C gle hemisphere, the triangulation is implicitly extended -C to the entire surface by adding pseudo-arcs (of length -C greater than 180 degrees) between boundary nodes forming -C pseudo-triangles whose 'circumcenters' are included in the -C list. This extension to the triangulation actually con- -C sists of a triangulation of the set of boundary nodes in -C which the swap test is reversed (a non-empty circumcircle -C test). The negative circumcenters are stored as the -C pseudo-triangle 'circumcenters'. LISTC, LPTR, LEND, and -C LNEW contain a data structure corresponding to the ex- -C tended triangulation (Voronoi diagram), but LIST is not -C altered in this case. Thus, if it is necessary to retain -C the original (unextended) triangulation data structure, -C copies of LPTR and LNEW must be saved before calling this -C routine. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C Note that, if N = 3, there are only two Voronoi -C vertices separated by 180 degrees, and the -C Voronoi regions are not well defined. -C -C NCOL = Number of columns reserved for LTRI. 
This -C must be at least NB-2, where NB is the number -C of boundary nodes. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C LIST = Integer array containing the set of adjacency -C lists. Refer to Subroutine TRMESH. -C -C LEND = Set of pointers to ends of adjacency lists. -C Refer to Subroutine TRMESH. -C -C The above parameters are not altered by this routine. -C -C LPTR = Array of pointers associated with LIST. Re- -C fer to Subroutine TRMESH. -C -C LNEW = Pointer to the first empty location in LIST -C and LPTR (list length plus one). -C -C LTRI = Integer work space array dimensioned 6 by -C NCOL, or unused dummy parameter if NB = 0. -C -C LISTC = Integer array of length at least 3*NT, where -C NT = 2*N-4 is the number of triangles in the -C triangulation (after extending it to cover -C the entire surface if necessary). -C -C XC,YC,ZC,RC = Arrays of length NT = 2*N-4. -C -C On output: -C -C LPTR = Array of pointers associated with LISTC: -C updated for the addition of pseudo-triangles -C if the original triangulation contains -C boundary nodes (NB > 0). -C -C LNEW = Pointer to the first empty location in LISTC -C and LPTR (list length plus one). LNEW is not -C altered if NB = 0. -C -C LTRI = Triangle list whose first NB-2 columns con- -C tain the indexes of a clockwise-ordered -C sequence of vertices (first three rows) -C followed by the LTRI column indexes of the -C triangles opposite the vertices (or 0 -C denoting the exterior region) in the last -C three rows. This array is not generally of -C any use. -C -C LISTC = Array containing triangle indexes (indexes -C to XC, YC, ZC, and RC) stored in 1-1 corres- -C pondence with LIST/LPTR entries (or entries -C that would be stored in LIST for the -C extended triangulation): the index of tri- -C angle (N1,N2,N3) is stored in LISTC(K), -C LISTC(L), and LISTC(M), where LIST(K), -C LIST(L), and LIST(M) are the indexes of N2 -C as a neighbor of N1, N3 as a neighbor of N2, -C and N1 as a neighbor of N3. The Voronoi -C region associated with a node is defined by -C the CCW-ordered sequence of circumcenters in -C one-to-one correspondence with its adjacency -C list (in the extended triangulation). -C -C NB = Number of boundary nodes unless IER = 1. -C -C XC,YC,ZC = Arrays containing the Cartesian coordi- -C nates of the triangle circumcenters -C (Voronoi vertices). XC(I)**2 + YC(I)**2 -C + ZC(I)**2 = 1. The first NB-2 entries -C correspond to pseudo-triangles if NB > 0. -C -C RC = Array containing circumradii (the arc lengths -C or angles between the circumcenters and associ- -C ated triangle vertices) in 1-1 correspondence -C with circumcenters. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if N < 3. -C IER = 2 if NCOL < NB-2. -C IER = 3 if a triangle is degenerate (has ver- -C tices lying on a common geodesic). -C -C Modules required by CRLIST: CIRCUM, LSTPTR, SWPTST -C -C Intrinsic functions called by CRLIST: ABS, ACOS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER I1, I2, I3, I4, IERR, KT, KT1, KT2, KT11, - . KT12, KT21, KT22, LP, LPL, LPN, N0, N1, N2, - . 
N3, N4, NM2, NN, NT - LOGICAL SWPTST - LOGICAL SWP - REAL C(3), T, V1(3), V2(3), V3(3) -C -C Local parameters: -C -C C = Circumcenter returned by Subroutine CIRCUM -C I1,I2,I3 = Permutation of (1,2,3): LTRI row indexes -C I4 = LTRI row index in the range 1 to 3 -C IERR = Error flag for calls to CIRCUM -C KT = Triangle index -C KT1,KT2 = Indexes of a pair of adjacent pseudo-triangles -C KT11,KT12 = Indexes of the pseudo-triangles opposite N1 -C and N2 as vertices of KT1 -C KT21,KT22 = Indexes of the pseudo-triangles opposite N1 -C and N2 as vertices of KT2 -C LP,LPN = LIST pointers -C LPL = LIST pointer of the last neighbor of N1 -C N0 = Index of the first boundary node (initial -C value of N1) in the loop on boundary nodes -C used to store the pseudo-triangle indexes -C in LISTC -C N1,N2,N3 = Nodal indexes defining a triangle (CCW order) -C or pseudo-triangle (clockwise order) -C N4 = Index of the node opposite N2 -> N1 -C NM2 = N-2 -C NN = Local copy of N -C NT = Number of pseudo-triangles: NB-2 -C SWP = Logical variable set to TRUE in each optimiza- -C tion loop (loop on pseudo-arcs) iff a swap -C is performed -C V1,V2,V3 = Vertices of triangle KT = (N1,N2,N3) sent to -C Subroutine CIRCUM -C - NN = N - NB = 0 - NT = 0 - IF (NN .LT. 3) GO TO 21 -C -C Search for a boundary node N1. -C - DO 1 N1 = 1,NN - LP = LEND(N1) - IF (LIST(LP) .LT. 0) GO TO 2 - 1 CONTINUE -C -C The triangulation already covers the sphere. -C - GO TO 9 -C -C There are NB .GE. 3 boundary nodes. Add NB-2 pseudo- -C triangles (N1,N2,N3) by connecting N3 to the NB-3 -C boundary nodes to which it is not already adjacent. -C -C Set N3 and N2 to the first and last neighbors, -C respectively, of N1. -C - 2 N2 = -LIST(LP) - LP = LPTR(LP) - N3 = LIST(LP) -C -C Loop on boundary arcs N1 -> N2 in clockwise order, -C storing triangles (N1,N2,N3) in column NT of LTRI -C along with the indexes of the triangles opposite -C the vertices. -C - 3 NT = NT + 1 - IF (NT .LE. NCOL) THEN - LTRI(1,NT) = N1 - LTRI(2,NT) = N2 - LTRI(3,NT) = N3 - LTRI(4,NT) = NT + 1 - LTRI(5,NT) = NT - 1 - LTRI(6,NT) = 0 - ENDIF - N1 = N2 - LP = LEND(N1) - N2 = -LIST(LP) - IF (N2 .NE. N3) GO TO 3 -C - NB = NT + 2 - IF (NCOL .LT. NT) GO TO 22 - LTRI(4,NT) = 0 - IF (NT .EQ. 1) GO TO 7 -C -C Optimize the exterior triangulation (set of pseudo- -C triangles) by applying swaps to the pseudo-arcs N1-N2 -C (pairs of adjacent pseudo-triangles KT1 and KT2 > KT1). -C The loop on pseudo-arcs is repeated until no swaps are -C performed. -C - 4 SWP = .FALSE. - DO 6 KT1 = 1,NT-1 - DO 5 I3 = 1,3 - KT2 = LTRI(I3+3,KT1) - IF (KT2 .LE. KT1) GO TO 5 -C -C The LTRI row indexes (I1,I2,I3) of triangle KT1 = -C (N1,N2,N3) are a cyclical permutation of (1,2,3). -C - IF (I3 .EQ. 1) THEN - I1 = 2 - I2 = 3 - ELSEIF (I3 .EQ. 2) THEN - I1 = 3 - I2 = 1 - ELSE - I1 = 1 - I2 = 2 - ENDIF - N1 = LTRI(I1,KT1) - N2 = LTRI(I2,KT1) - N3 = LTRI(I3,KT1) -C -C KT2 = (N2,N1,N4) for N4 = LTRI(I,KT2), where -C LTRI(I+3,KT2) = KT1. -C - IF (LTRI(4,KT2) .EQ. KT1) THEN - I4 = 1 - ELSEIF (LTRI(5,KT2) .EQ. KT1) THEN - I4 = 2 - ELSE - I4 = 3 - ENDIF - N4 = LTRI(I4,KT2) -C -C The empty circumcircle test is reversed for the pseudo- -C triangles. The reversal is implicit in the clockwise -C ordering of the vertices. -C - IF ( .NOT. SWPTST(N1,N2,N3,N4,X,Y,Z) ) GO TO 5 -C -C Swap arc N1-N2 for N3-N4. KTij is the triangle opposite -C Nj as a vertex of KTi. -C - SWP = .TRUE. - KT11 = LTRI(I1+3,KT1) - KT12 = LTRI(I2+3,KT1) - IF (I4 .EQ. 1) THEN - I2 = 2 - I1 = 3 - ELSEIF (I4 .EQ. 
2) THEN - I2 = 3 - I1 = 1 - ELSE - I2 = 1 - I1 = 2 - ENDIF - KT21 = LTRI(I1+3,KT2) - KT22 = LTRI(I2+3,KT2) - LTRI(1,KT1) = N4 - LTRI(2,KT1) = N3 - LTRI(3,KT1) = N1 - LTRI(4,KT1) = KT12 - LTRI(5,KT1) = KT22 - LTRI(6,KT1) = KT2 - LTRI(1,KT2) = N3 - LTRI(2,KT2) = N4 - LTRI(3,KT2) = N2 - LTRI(4,KT2) = KT21 - LTRI(5,KT2) = KT11 - LTRI(6,KT2) = KT1 -C -C Correct the KT11 and KT22 entries that changed. -C - IF (KT11 .NE. 0) THEN - I4 = 4 - IF (LTRI(4,KT11) .NE. KT1) THEN - I4 = 5 - IF (LTRI(5,KT11) .NE. KT1) I4 = 6 - ENDIF - LTRI(I4,KT11) = KT2 - ENDIF - IF (KT22 .NE. 0) THEN - I4 = 4 - IF (LTRI(4,KT22) .NE. KT2) THEN - I4 = 5 - IF (LTRI(5,KT22) .NE. KT2) I4 = 6 - ENDIF - LTRI(I4,KT22) = KT1 - ENDIF - 5 CONTINUE - 6 CONTINUE - IF (SWP) GO TO 4 -C -C Compute and store the negative circumcenters and radii of -C the pseudo-triangles in the first NT positions. -C - 7 DO 8 KT = 1,NT - N1 = LTRI(1,KT) - N2 = LTRI(2,KT) - N3 = LTRI(3,KT) - V1(1) = X(N1) - V1(2) = Y(N1) - V1(3) = Z(N1) - V2(1) = X(N2) - V2(2) = Y(N2) - V2(3) = Z(N2) - V3(1) = X(N3) - V3(2) = Y(N3) - V3(3) = Z(N3) - CALL CIRCUM (V1,V2,V3, C,IERR) - IF (IERR .NE. 0) GO TO 23 -C -C Store the negative circumcenter and radius (computed -C from ). -C - XC(KT) = C(1) - YC(KT) = C(2) - ZC(KT) = C(3) - T = V1(1)*C(1) + V1(2)*C(2) + V1(3)*C(3) - IF (T .LT. -1.0) T = -1.0 - IF (T .GT. 1.0) T = 1.0 - RC(KT) = ACOS(T) - 8 CONTINUE -C -C Compute and store the circumcenters and radii of the -C actual triangles in positions KT = NT+1, NT+2, ... -C Also, store the triangle indexes KT in the appropriate -C LISTC positions. -C - 9 KT = NT -C -C Loop on nodes N1. -C - NM2 = NN - 2 - DO 12 N1 = 1,NM2 - LPL = LEND(N1) - LP = LPL - N3 = LIST(LP) -C -C Loop on adjacent neighbors N2,N3 of N1 for which N2 > N1 -C and N3 > N1. -C - 10 LP = LPTR(LP) - N2 = N3 - N3 = ABS(LIST(LP)) - IF (N2 .LE. N1 .OR. N3 .LE. N1) GO TO 11 - KT = KT + 1 -C -C Compute the circumcenter C of triangle KT = (N1,N2,N3). -C - V1(1) = X(N1) - V1(2) = Y(N1) - V1(3) = Z(N1) - V2(1) = X(N2) - V2(2) = Y(N2) - V2(3) = Z(N2) - V3(1) = X(N3) - V3(2) = Y(N3) - V3(3) = Z(N3) - CALL CIRCUM (V1,V2,V3, C,IERR) - IF (IERR .NE. 0) GO TO 23 -C -C Store the circumcenter, radius and triangle index. -C - XC(KT) = C(1) - YC(KT) = C(2) - ZC(KT) = C(3) - T = V1(1)*C(1) + V1(2)*C(2) + V1(3)*C(3) - IF (T .LT. -1.0) T = -1.0 - IF (T .GT. 1.0) T = 1.0 - RC(KT) = ACOS(T) -C -C Store KT in LISTC(LPN), where Abs(LIST(LPN)) is the -C index of N2 as a neighbor of N1, N3 as a neighbor -C of N2, and N1 as a neighbor of N3. -C - LPN = LSTPTR(LPL,N2,LIST,LPTR) - LISTC(LPN) = KT - LPN = LSTPTR(LEND(N2),N3,LIST,LPTR) - LISTC(LPN) = KT - LPN = LSTPTR(LEND(N3),N1,LIST,LPTR) - LISTC(LPN) = KT - 11 IF (LP .NE. LPL) GO TO 10 - 12 CONTINUE - IF (NT .EQ. 0) GO TO 20 -C -C Store the first NT triangle indexes in LISTC. -C -C Find a boundary triangle KT1 = (N1,N2,N3) with a -C boundary arc opposite N3. -C - KT1 = 0 - 13 KT1 = KT1 + 1 - IF (LTRI(4,KT1) .EQ. 0) THEN - I1 = 2 - I2 = 3 - I3 = 1 - GO TO 14 - ELSEIF (LTRI(5,KT1) .EQ. 0) THEN - I1 = 3 - I2 = 1 - I3 = 2 - GO TO 14 - ELSEIF (LTRI(6,KT1) .EQ. 0) THEN - I1 = 1 - I2 = 2 - I3 = 3 - GO TO 14 - ENDIF - GO TO 13 - 14 N1 = LTRI(I1,KT1) - N0 = N1 -C -C Loop on boundary nodes N1 in CCW order, storing the -C indexes of the clockwise-ordered sequence of triangles -C that contain N1. The first triangle overwrites the -C last neighbor position, and the remaining triangles, -C if any, are appended to N1's adjacency list. -C -C A pointer to the first neighbor of N1 is saved in LPN. 
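The bookkeeping above is what makes the Voronoi diagram readable from CRLIST's output: each entry of a node's CCW adjacency list is paired, through LISTC, with the index of one triangle circumcenter, so the node's Voronoi cell is simply that CCW-ordered sequence of circumcenters, with RC giving the arc-length circumradii. The sketch below restates the correspondence with plain Python dicts instead of the packed LIST/LPTR/LISTC arrays; the names and dict layout are ours, and only the correspondence itself comes from the code above.

    def voronoi_cell(node, ccw_neighbors, triangle_index, centers):
        """ccw_neighbors[n]: n's neighbors in CCW order;
        triangle_index[(n1, n2, n3)]: index into centers of the circumcenter of
        the CCW triangle (n1, n2, n3).  Returns the CCW Voronoi cell of node."""
        nbrs = ccw_neighbors[node]
        cell = []
        for i, n2 in enumerate(nbrs):
            n3 = nbrs[(i + 1) % len(nbrs)]        # next neighbor, CCW
            cell.append(centers[triangle_index[(node, n2, n3)]])
        return cell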
-C - 15 LP = LEND(N1) - LPN = LPTR(LP) - LISTC(LP) = KT1 -C -C Loop on triangles KT2 containing N1. -C - 16 KT2 = LTRI(I2+3,KT1) - IF (KT2 .NE. 0) THEN -C -C Append KT2 to N1's triangle list. -C - LPTR(LP) = LNEW - LP = LNEW - LISTC(LP) = KT2 - LNEW = LNEW + 1 -C -C Set KT1 to KT2 and update (I1,I2,I3) such that -C LTRI(I1,KT1) = N1. -C - KT1 = KT2 - IF (LTRI(1,KT1) .EQ. N1) THEN - I1 = 1 - I2 = 2 - I3 = 3 - ELSEIF (LTRI(2,KT1) .EQ. N1) THEN - I1 = 2 - I2 = 3 - I3 = 1 - ELSE - I1 = 3 - I2 = 1 - I3 = 2 - ENDIF - GO TO 16 - ENDIF -C -C Store the saved first-triangle pointer in LPTR(LP), set -C N1 to the next boundary node, test for termination, -C and permute the indexes: the last triangle containing -C a boundary node is the first triangle containing the -C next boundary node. -C - LPTR(LP) = LPN - N1 = LTRI(I3,KT1) - IF (N1 .NE. N0) THEN - I4 = I3 - I3 = I2 - I2 = I1 - I1 = I4 - GO TO 15 - ENDIF -C -C No errors encountered. -C - 20 IER = 0 - RETURN -C -C N < 3. -C - 21 IER = 1 - RETURN -C -C Insufficient space reserved for LTRI. -C - 22 IER = 2 - RETURN -C -C Error flag returned by CIRCUM: KT indexes a null triangle. -C - 23 IER = 3 - RETURN - END - SUBROUTINE DELARC (N,IO1,IO2, LIST,LPTR,LEND, - . LNEW, IER) - INTEGER N, IO1, IO2, LIST(*), LPTR(*), LEND(N), LNEW, - . IER -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine deletes a boundary arc from a triangula- -C tion. It may be used to remove a null triangle from the -C convex hull boundary. Note, however, that if the union of -C triangles is rendered nonconvex, Subroutines DELNOD, EDGE, -C and TRFIND (and hence ADDNOD) may fail. Also, Function -C NEARND should not be called following an arc deletion. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 4. -C -C IO1,IO2 = Indexes (in the range 1 to N) of a pair of -C adjacent boundary nodes defining the arc -C to be removed. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the removal of arc IO1-IO2 -C unless IER > 0. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if N, IO1, or IO2 is outside its valid -C range, or IO1 = IO2. -C IER = 2 if IO1-IO2 is not a boundary arc. -C IER = 3 if the node opposite IO1-IO2 is al- -C ready a boundary node, and thus IO1 -C or IO2 has only two neighbors or a -C deletion would result in two triangu- -C lations sharing a single node. -C IER = 4 if one of the nodes is a neighbor of -C the other, but not vice versa, imply- -C ing an invalid triangulation data -C structure. 
-C -C Module required by DELARC: DELNB, LSTPTR -C -C Intrinsic function called by DELARC: ABS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER LP, LPH, LPL, N1, N2, N3 -C -C Local parameters: -C -C LP = LIST pointer -C LPH = LIST pointer or flag returned by DELNB -C LPL = Pointer to the last neighbor of N1, N2, or N3 -C N1,N2,N3 = Nodal indexes of a triangle such that N1->N2 -C is the directed boundary edge associated -C with IO1-IO2 -C - N1 = IO1 - N2 = IO2 -C -C Test for errors, and set N1->N2 to the directed boundary -C edge associated with IO1-IO2: (N1,N2,N3) is a triangle -C for some N3. -C - IF (N .LT. 4 .OR. N1 .LT. 1 .OR. N1 .GT. N .OR. - . N2 .LT. 1 .OR. N2 .GT. N .OR. N1 .EQ. N2) THEN - IER = 1 - RETURN - ENDIF -C - LPL = LEND(N2) - IF (-LIST(LPL) .NE. N1) THEN - N1 = N2 - N2 = IO1 - LPL = LEND(N2) - IF (-LIST(LPL) .NE. N1) THEN - IER = 2 - RETURN - ENDIF - ENDIF -C -C Set N3 to the node opposite N1->N2 (the second neighbor -C of N1), and test for error 3 (N3 already a boundary -C node). -C - LPL = LEND(N1) - LP = LPTR(LPL) - LP = LPTR(LP) - N3 = ABS(LIST(LP)) - LPL = LEND(N3) - IF (LIST(LPL) .LE. 0) THEN - IER = 3 - RETURN - ENDIF -C -C Delete N2 as a neighbor of N1, making N3 the first -C neighbor, and test for error 4 (N2 not a neighbor -C of N1). Note that previously computed pointers may -C no longer be valid following the call to DELNB. -C - CALL DELNB (N1,N2,N, LIST,LPTR,LEND,LNEW, LPH) - IF (LPH .LT. 0) THEN - IER = 4 - RETURN - ENDIF -C -C Delete N1 as a neighbor of N2, making N3 the new last -C neighbor. -C - CALL DELNB (N2,N1,N, LIST,LPTR,LEND,LNEW, LPH) -C -C Make N3 a boundary node with first neighbor N2 and last -C neighbor N1. -C - LP = LSTPTR(LEND(N3),N1,LIST,LPTR) - LEND(N3) = LP - LIST(LP) = -N1 -C -C No errors encountered. -C - IER = 0 - RETURN - END - SUBROUTINE DELNB (N0,NB,N, LIST,LPTR,LEND,LNEW, LPH) - INTEGER N0, NB, N, LIST(*), LPTR(*), LEND(N), LNEW, - . LPH -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/29/98 -C -C This subroutine deletes a neighbor NB from the adjacency -C list of node N0 (but N0 is not deleted from the adjacency -C list of NB) and, if NB is a boundary node, makes N0 a -C boundary node. For pointer (LIST index) LPH to NB as a -C neighbor of N0, the empty LIST,LPTR location LPH is filled -C in with the values at LNEW-1, pointer LNEW-1 (in LPTR and -C possibly in LEND) is changed to LPH, and LNEW is decremen- -C ted. This requires a search of LEND and LPTR entailing an -C expected operation count of O(N). -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C N0,NB = Indexes, in the range 1 to N, of a pair of -C nodes such that NB is a neighbor of N0. -C (N0 need not be a neighbor of NB.) -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the removal of NB from the ad- -C jacency list of N0 unless -C LPH < 0. -C -C LPH = List pointer to the hole (NB as a neighbor of -C N0) filled in by the values at LNEW-1 or error -C indicator: -C LPH > 0 if no errors were encountered. -C LPH = -1 if N0, NB, or N is outside its valid -C range. -C LPH = -2 if NB is not a neighbor of N0. 
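A simplified view of what DELNB does, using a dict of CCW neighbor lists and a set of boundary nodes instead of the packed arrays (illustrative only; the real routine must also fill the vacated LIST/LPTR slot with the entry at LNEW-1 and repair every pointer to it, which is where its O(N) cost comes from):

    def delete_neighbor(n0, nb, adjacency, boundary):
        """Remove nb from n0's neighbor list (but not n0 from nb's); if nb is a
        boundary node, n0 becomes one too, mirroring DELNB."""
        if nb not in adjacency[n0]:
            raise ValueError("nb is not a neighbor of n0 (the LPH = -2 case)")
        adjacency[n0].remove(nb)
        if nb in boundary:
            boundary.add(n0)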
-C -C Modules required by DELNB: None -C -C Intrinsic function called by DELNB: ABS -C -C*********************************************************** -C - INTEGER I, LNW, LP, LPB, LPL, LPP, NN -C -C Local parameters: -C -C I = DO-loop index -C LNW = LNEW-1 (output value of LNEW) -C LP = LIST pointer of the last neighbor of NB -C LPB = Pointer to NB as a neighbor of N0 -C LPL = Pointer to the last neighbor of N0 -C LPP = Pointer to the neighbor of N0 that precedes NB -C NN = Local copy of N -C - NN = N -C -C Test for error 1. -C - IF (N0 .LT. 1 .OR. N0 .GT. NN .OR. NB .LT. 1 .OR. - . NB .GT. NN .OR. NN .LT. 3) THEN - LPH = -1 - RETURN - ENDIF -C -C Find pointers to neighbors of N0: -C -C LPL points to the last neighbor, -C LPP points to the neighbor NP preceding NB, and -C LPB points to NB. -C - LPL = LEND(N0) - LPP = LPL - LPB = LPTR(LPP) - 1 IF (LIST(LPB) .EQ. NB) GO TO 2 - LPP = LPB - LPB = LPTR(LPP) - IF (LPB .NE. LPL) GO TO 1 -C -C Test for error 2 (NB not found). -C - IF (ABS(LIST(LPB)) .NE. NB) THEN - LPH = -2 - RETURN - ENDIF -C -C NB is the last neighbor of N0. Make NP the new last -C neighbor and, if NB is a boundary node, then make N0 -C a boundary node. -C - LEND(N0) = LPP - LP = LEND(NB) - IF (LIST(LP) .LT. 0) LIST(LPP) = -LIST(LPP) - GO TO 3 -C -C NB is not the last neighbor of N0. If NB is a boundary -C node and N0 is not, then make N0 a boundary node with -C last neighbor NP. -C - 2 LP = LEND(NB) - IF (LIST(LP) .LT. 0 .AND. LIST(LPL) .GT. 0) THEN - LEND(N0) = LPP - LIST(LPP) = -LIST(LPP) - ENDIF -C -C Update LPTR so that the neighbor following NB now fol- -C lows NP, and fill in the hole at location LPB. -C - 3 LPTR(LPP) = LPTR(LPB) - LNW = LNEW-1 - LIST(LPB) = LIST(LNW) - LPTR(LPB) = LPTR(LNW) - DO 4 I = NN,1,-1 - IF (LEND(I) .EQ. LNW) THEN - LEND(I) = LPB - GO TO 5 - ENDIF - 4 CONTINUE -C - 5 DO 6 I = 1,LNW-1 - IF (LPTR(I) .EQ. LNW) THEN - LPTR(I) = LPB - ENDIF - 6 CONTINUE -C -C No errors encountered. -C - LNEW = LNW - LPH = LPB - RETURN - END - SUBROUTINE DELNOD (K, N,X,Y,Z,LIST,LPTR,LEND,LNEW,LWK, - . IWK, IER) - INTEGER K, N, LIST(*), LPTR(*), LEND(*), LNEW, LWK, - . IWK(2,*), IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 11/30/99 -C -C This subroutine deletes node K (along with all arcs -C incident on node K) from a triangulation of N nodes on the -C unit sphere, and inserts arcs as necessary to produce a -C triangulation of the remaining N-1 nodes. If a Delaunay -C triangulation is input, a Delaunay triangulation will -C result, and thus, DELNOD reverses the effect of a call to -C Subroutine ADDNOD. -C -C -C On input: -C -C K = Index (for X, Y, and Z) of the node to be -C deleted. 1 .LE. K .LE. N. -C -C K is not altered by this routine. -C -C N = Number of nodes in the triangulation on input. -C N .GE. 4. Note that N will be decremented -C following the deletion. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes in the triangula- -C tion. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. Refer to Sub- -C routine TRMESH. -C -C LWK = Number of columns reserved for IWK. LWK must -C be at least NNB-3, where NNB is the number of -C neighbors of node K, including an extra -C pseudo-node if K is a boundary node. -C -C IWK = Integer work array dimensioned 2 by LWK (or -C array of length .GE. 2*LWK). 
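One way to see the LWK .GE. NNB-3 requirement stated above: deleting an interior node with NNB neighbors leaves an NNB-sided polygonal hole, and re-triangulating that hole produces NNB-2 triangles using NNB-3 new arcs (its diagonals), so IWK needs one column per new arc. A node with six neighbors, for instance, requires three new arcs.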
-C -C On output: -C -C N = Number of nodes in the triangulation on output. -C The input value is decremented unless 1 .LE. IER -C .LE. 4. -C -C X,Y,Z = Updated arrays containing nodal coordinates -C (with elements K+1,...,N+1 shifted up one -C position, thus overwriting element K) unless -C 1 .LE. IER .LE. 4. -C -C LIST,LPTR,LEND,LNEW = Updated triangulation data -C structure reflecting the dele- -C tion unless 1 .LE. IER .LE. 4. -C Note that the data structure -C may have been altered if IER > -C 3. -C -C LWK = Number of IWK columns required unless IER = 1 -C or IER = 3. -C -C IWK = Indexes of the endpoints of the new arcs added -C unless LWK = 0 or 1 .LE. IER .LE. 4. (Arcs -C are associated with columns, or pairs of -C adjacent elements if IWK is declared as a -C singly-subscripted array.) -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if K or N is outside its valid range -C or LWK < 0 on input. -C IER = 2 if more space is required in IWK. -C Refer to LWK. -C IER = 3 if the triangulation data structure is -C invalid on input. -C IER = 4 if K indexes an interior node with -C four or more neighbors, none of which -C can be swapped out due to collineari- -C ty, and K cannot therefore be deleted. -C IER = 5 if an error flag (other than IER = 1) -C was returned by OPTIM. An error -C message is written to the standard -C output unit in this case. -C IER = 6 if error flag 1 was returned by OPTIM. -C This is not necessarily an error, but -C the arcs may not be optimal. -C -C Note that the deletion may result in all remaining nodes -C being collinear. This situation is not flagged. -C -C Modules required by DELNOD: DELNB, LEFT, LSTPTR, NBCNT, -C OPTIM, SWAP, SWPTST -C -C Intrinsic function called by DELNOD: ABS -C -C*********************************************************** -C - INTEGER LSTPTR, NBCNT - INTEGER I, IERR, IWL, J, LNW, LP, LP21, LPF, LPH, LPL, - . LPL2, LPN, LWKL, N1, N2, NFRST, NIT, NL, NN, - . NNB, NR - LOGICAL LEFT - LOGICAL BDRY - REAL X1, X2, XL, XR, Y1, Y2, YL, YR, Z1, Z2, ZL, ZR -C -C Local parameters: -C -C BDRY = Logical variable with value TRUE iff N1 is a -C boundary node -C I,J = DO-loop indexes -C IERR = Error flag returned by OPTIM -C IWL = Number of IWK columns containing arcs -C LNW = Local copy of LNEW -C LP = LIST pointer -C LP21 = LIST pointer returned by SWAP -C LPF,LPL = Pointers to the first and last neighbors of N1 -C LPH = Pointer (or flag) returned by DELNB -C LPL2 = Pointer to the last neighbor of N2 -C LPN = Pointer to a neighbor of N1 -C LWKL = Input value of LWK -C N1 = Local copy of K -C N2 = Neighbor of N1 -C NFRST = First neighbor of N1: LIST(LPF) -C NIT = Number of iterations in OPTIM -C NR,NL = Neighbors of N1 preceding (to the right of) and -C following (to the left of) N2, respectively -C NN = Number of nodes in the triangulation -C NNB = Number of neighbors of N1 (including a pseudo- -C node representing the boundary if N1 is a -C boundary node) -C X1,Y1,Z1 = Coordinates of N1 -C X2,Y2,Z2 = Coordinates of N2 -C XL,YL,ZL = Coordinates of NL -C XR,YR,ZR = Coordinates of NR -C -C -C Set N1 to K and NNB to the number of neighbors of N1 (plus -C one if N1 is a boundary node), and test for errors. LPF -C and LPL are LIST indexes of the first and last neighbors -C of N1, IWL is the number of IWK columns containing arcs, -C and BDRY is TRUE iff N1 is a boundary node. -C - N1 = K - NN = N - IF (N1 .LT. 1 .OR. N1 .GT. NN .OR. NN .LT. 4 .OR. - . LWK .LT. 
0) GO TO 21 - LPL = LEND(N1) - LPF = LPTR(LPL) - NNB = NBCNT(LPL,LPTR) - BDRY = LIST(LPL) .LT. 0 - IF (BDRY) NNB = NNB + 1 - IF (NNB .LT. 3) GO TO 23 - LWKL = LWK - LWK = NNB - 3 - IF (LWKL .LT. LWK) GO TO 22 - IWL = 0 - IF (NNB .EQ. 3) GO TO 3 -C -C Initialize for loop on arcs N1-N2 for neighbors N2 of N1, -C beginning with the second neighbor. NR and NL are the -C neighbors preceding and following N2, respectively, and -C LP indexes NL. The loop is exited when all possible -C swaps have been applied to arcs incident on N1. -C - X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - NFRST = LIST(LPF) - NR = NFRST - XR = X(NR) - YR = Y(NR) - ZR = Z(NR) - LP = LPTR(LPF) - N2 = LIST(LP) - X2 = X(N2) - Y2 = Y(N2) - Z2 = Z(N2) - LP = LPTR(LP) -C -C Top of loop: set NL to the neighbor following N2. -C - 1 NL = ABS(LIST(LP)) - IF (NL .EQ. NFRST .AND. BDRY) GO TO 3 - XL = X(NL) - YL = Y(NL) - ZL = Z(NL) -C -C Test for a convex quadrilateral. To avoid an incorrect -C test caused by collinearity, use the fact that if N1 -C is a boundary node, then N1 LEFT NR->NL and if N2 is -C a boundary node, then N2 LEFT NL->NR. -C - LPL2 = LEND(N2) - IF ( .NOT. ((BDRY .OR. LEFT(XR,YR,ZR,XL,YL,ZL,X1,Y1, - . Z1)) .AND. (LIST(LPL2) .LT. 0 .OR. - . LEFT(XL,YL,ZL,XR,YR,ZR,X2,Y2,Z2))) ) THEN -C -C Nonconvex quadrilateral -- no swap is possible. -C - NR = N2 - XR = X2 - YR = Y2 - ZR = Z2 - GO TO 2 - ENDIF -C -C The quadrilateral defined by adjacent triangles -C (N1,N2,NL) and (N2,N1,NR) is convex. Swap in -C NL-NR and store it in IWK unless NL and NR are -C already adjacent, in which case the swap is not -C possible. Indexes larger than N1 must be decremented -C since N1 will be deleted from X, Y, and Z. -C - CALL SWAP (NL,NR,N1,N2, LIST,LPTR,LEND, LP21) - IF (LP21 .EQ. 0) THEN - NR = N2 - XR = X2 - YR = Y2 - ZR = Z2 - GO TO 2 - ENDIF - IWL = IWL + 1 - IF (NL .LE. N1) THEN - IWK(1,IWL) = NL - ELSE - IWK(1,IWL) = NL - 1 - ENDIF - IF (NR .LE. N1) THEN - IWK(2,IWL) = NR - ELSE - IWK(2,IWL) = NR - 1 - ENDIF -C -C Recompute the LIST indexes and NFRST, and decrement NNB. -C - LPL = LEND(N1) - NNB = NNB - 1 - IF (NNB .EQ. 3) GO TO 3 - LPF = LPTR(LPL) - NFRST = LIST(LPF) - LP = LSTPTR(LPL,NL,LIST,LPTR) - IF (NR .EQ. NFRST) GO TO 2 -C -C NR is not the first neighbor of N1. -C Back up and test N1-NR for a swap again: Set N2 to -C NR and NR to the previous neighbor of N1 -- the -C neighbor of NR which follows N1. LP21 points to NL -C as a neighbor of NR. -C - N2 = NR - X2 = XR - Y2 = YR - Z2 = ZR - LP21 = LPTR(LP21) - LP21 = LPTR(LP21) - NR = ABS(LIST(LP21)) - XR = X(NR) - YR = Y(NR) - ZR = Z(NR) - GO TO 1 -C -C Bottom of loop -- test for termination of loop. -C - 2 IF (N2 .EQ. NFRST) GO TO 3 - N2 = NL - X2 = XL - Y2 = YL - Z2 = ZL - LP = LPTR(LP) - GO TO 1 -C -C Delete N1 and all its incident arcs. If N1 is an interior -C node and either NNB > 3 or NNB = 3 and N2 LEFT NR->NL, -C then N1 must be separated from its neighbors by a plane -C containing the origin -- its removal reverses the effect -C of a call to COVSPH, and all its neighbors become -C boundary nodes. This is achieved by treating it as if -C it were a boundary node (setting BDRY to TRUE, changing -C a sign in LIST, and incrementing NNB). -C - 3 IF (.NOT. BDRY) THEN - IF (NNB .GT. 3) THEN - BDRY = .TRUE. - ELSE - LPF = LPTR(LPL) - NR = LIST(LPF) - LP = LPTR(LPF) - N2 = LIST(LP) - NL = LIST(LPL) - BDRY = LEFT(X(NR),Y(NR),Z(NR),X(NL),Y(NL),Z(NL), - . 
X(N2),Y(N2),Z(N2)) - ENDIF - IF (BDRY) THEN -C -C IF a boundary node already exists, then N1 and its -C neighbors cannot be converted to boundary nodes. -C (They must be collinear.) This is a problem if -C NNB > 3. -C - DO 4 I = 1,NN - IF (LIST(LEND(I)) .LT. 0) THEN - BDRY = .FALSE. - GO TO 5 - ENDIF - 4 CONTINUE - LIST(LPL) = -LIST(LPL) - NNB = NNB + 1 - ENDIF - ENDIF - 5 IF (.NOT. BDRY .AND. NNB .GT. 3) GO TO 24 -C -C Initialize for loop on neighbors. LPL points to the last -C neighbor of N1. LNEW is stored in local variable LNW. -C - LP = LPL - LNW = LNEW -C -C Loop on neighbors N2 of N1, beginning with the first. -C - 6 LP = LPTR(LP) - N2 = ABS(LIST(LP)) - CALL DELNB (N2,N1,N, LIST,LPTR,LEND,LNW, LPH) - IF (LPH .LT. 0) GO TO 23 -C -C LP and LPL may require alteration. -C - IF (LPL .EQ. LNW) LPL = LPH - IF (LP .EQ. LNW) LP = LPH - IF (LP .NE. LPL) GO TO 6 -C -C Delete N1 from X, Y, Z, and LEND, and remove its adjacency -C list from LIST and LPTR. LIST entries (nodal indexes) -C which are larger than N1 must be decremented. -C - NN = NN - 1 - IF (N1 .GT. NN) GO TO 9 - DO 7 I = N1,NN - X(I) = X(I+1) - Y(I) = Y(I+1) - Z(I) = Z(I+1) - LEND(I) = LEND(I+1) - 7 CONTINUE -C - DO 8 I = 1,LNW-1 - IF (LIST(I) .GT. N1) LIST(I) = LIST(I) - 1 - IF (LIST(I) .LT. -N1) LIST(I) = LIST(I) + 1 - 8 CONTINUE -C -C For LPN = first to last neighbors of N1, delete the -C preceding neighbor (indexed by LP). -C -C Each empty LIST,LPTR location LP is filled in with the -C values at LNW-1, and LNW is decremented. All pointers -C (including those in LPTR and LEND) with value LNW-1 -C must be changed to LP. -C -C LPL points to the last neighbor of N1. -C - 9 IF (BDRY) NNB = NNB - 1 - LPN = LPL - DO 13 J = 1,NNB - LNW = LNW - 1 - LP = LPN - LPN = LPTR(LP) - LIST(LP) = LIST(LNW) - LPTR(LP) = LPTR(LNW) - IF (LPTR(LPN) .EQ. LNW) LPTR(LPN) = LP - IF (LPN .EQ. LNW) LPN = LP - DO 10 I = NN,1,-1 - IF (LEND(I) .EQ. LNW) THEN - LEND(I) = LP - GO TO 11 - ENDIF - 10 CONTINUE -C - 11 DO 12 I = LNW-1,1,-1 - IF (LPTR(I) .EQ. LNW) LPTR(I) = LP - 12 CONTINUE - 13 CONTINUE -C -C Update N and LNEW, and optimize the patch of triangles -C containing K (on input) by applying swaps to the arcs -C in IWK. -C - N = NN - LNEW = LNW - IF (IWL .GT. 0) THEN - NIT = 4*IWL - CALL OPTIM (X,Y,Z,IWL, LIST,LPTR,LEND,NIT,IWK, IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 25 - IF (IERR .EQ. 1) GO TO 26 - ENDIF -C -C Successful termination. -C - IER = 0 - RETURN -C -C Invalid input parameter. -C - 21 IER = 1 - RETURN -C -C Insufficient space reserved for IWK. -C - 22 IER = 2 - RETURN -C -C Invalid triangulation data structure. NNB < 3 on input or -C N2 is a neighbor of N1 but N1 is not a neighbor of N2. -C - 23 IER = 3 - RETURN -C -C N1 is interior but NNB could not be reduced to 3. -C - 24 IER = 4 - RETURN -C -C Error flag (other than 1) returned by OPTIM. -C - 25 IER = 5 - WRITE (*,100) NIT, IERR - 100 FORMAT (//5X,'*** Error in OPTIM (called from ', - . 'DELNOD): NIT = ',I4,', IER = ',I1,' ***'/) - RETURN -C -C Error flag 1 returned by OPTIM. -C - 26 IER = 6 - RETURN - END - SUBROUTINE EDGE (IN1,IN2,X,Y,Z, LWK,IWK,LIST,LPTR, - . LEND, IER) - INTEGER IN1, IN2, LWK, IWK(2,*), LIST(*), LPTR(*), - . LEND(*), IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. 
of North Texas -C renka@cs.unt.edu -C 07/30/98 -C -C Given a triangulation of N nodes and a pair of nodal -C indexes IN1 and IN2, this routine swaps arcs as necessary -C to force IN1 and IN2 to be adjacent. Only arcs which -C intersect IN1-IN2 are swapped out. If a Delaunay triangu- -C lation is input, the resulting triangulation is as close -C as possible to a Delaunay triangulation in the sense that -C all arcs other than IN1-IN2 are locally optimal. -C -C A sequence of calls to EDGE may be used to force the -C presence of a set of edges defining the boundary of a non- -C convex and/or multiply connected region, or to introduce -C barriers into the triangulation. Note that Subroutine -C GETNP will not necessarily return closest nodes if the -C triangulation has been constrained by a call to EDGE. -C However, this is appropriate in some applications, such -C as triangle-based interpolation on a nonconvex domain. -C -C -C On input: -C -C IN1,IN2 = Indexes (of X, Y, and Z) in the range 1 to -C N defining a pair of nodes to be connected -C by an arc. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C The above parameters are not altered by this routine. -C -C LWK = Number of columns reserved for IWK. This must -C be at least NI -- the number of arcs that -C intersect IN1-IN2. (NI is bounded by N-3.) -C -C IWK = Integer work array of length at least 2*LWK. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C On output: -C -C LWK = Number of arcs which intersect IN1-IN2 (but -C not more than the input value of LWK) unless -C IER = 1 or IER = 3. LWK = 0 if and only if -C IN1 and IN2 were adjacent (or LWK=0) on input. -C -C IWK = Array containing the indexes of the endpoints -C of the new arcs other than IN1-IN2 unless -C IER > 0 or LWK = 0. New arcs to the left of -C IN1->IN2 are stored in the first K-1 columns -C (left portion of IWK), column K contains -C zeros, and new arcs to the right of IN1->IN2 -C occupy columns K+1,...,LWK. (K can be deter- -C mined by searching IWK for the zeros.) -C -C LIST,LPTR,LEND = Data structure updated if necessary -C to reflect the presence of an arc -C connecting IN1 and IN2 unless IER > -C 0. The data structure has been -C altered if IER >= 4. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if IN1 < 1, IN2 < 1, IN1 = IN2, -C or LWK < 0 on input. -C IER = 2 if more space is required in IWK. -C Refer to LWK. -C IER = 3 if IN1 and IN2 could not be connected -C due to either an invalid data struc- -C ture or collinear nodes (and floating -C point error). -C IER = 4 if an error flag other than IER = 1 -C was returned by OPTIM. -C IER = 5 if error flag 1 was returned by OPTIM. -C This is not necessarily an error, but -C the arcs other than IN1-IN2 may not -C be optimal. -C -C An error message is written to the standard output unit -C in the case of IER = 3 or IER = 4. -C -C Modules required by EDGE: LEFT, LSTPTR, OPTIM, SWAP, -C SWPTST -C -C Intrinsic function called by EDGE: ABS -C -C*********************************************************** -C - LOGICAL LEFT - INTEGER I, IERR, IWC, IWCP1, IWEND, IWF, IWL, LFT, LP, - . LP21, LPL, N0, N1, N1FRST, N1LST, N2, NEXT, - . NIT, NL, NR - REAL DP12, DP1L, DP1R, DP2L, DP2R, X0, X1, X2, Y0, - . 
Y1, Y2, Z0, Z1, Z2 -C -C Local parameters: -C -C DPij = Dot product -C I = DO-loop index and column index for IWK -C IERR = Error flag returned by Subroutine OPTIM -C IWC = IWK index between IWF and IWL -- NL->NR is -C stored in IWK(1,IWC)->IWK(2,IWC) -C IWCP1 = IWC + 1 -C IWEND = Input or output value of LWK -C IWF = IWK (column) index of the first (leftmost) arc -C which intersects IN1->IN2 -C IWL = IWK (column) index of the last (rightmost) are -C which intersects IN1->IN2 -C LFT = Flag used to determine if a swap results in the -C new arc intersecting IN1-IN2 -- LFT = 0 iff -C N0 = IN1, LFT = -1 implies N0 LEFT IN1->IN2, -C and LFT = 1 implies N0 LEFT IN2->IN1 -C LP = List pointer (index for LIST and LPTR) -C LP21 = Unused parameter returned by SWAP -C LPL = Pointer to the last neighbor of IN1 or NL -C N0 = Neighbor of N1 or node opposite NR->NL -C N1,N2 = Local copies of IN1 and IN2 -C N1FRST = First neighbor of IN1 -C N1LST = (Signed) last neighbor of IN1 -C NEXT = Node opposite NL->NR -C NIT = Flag or number of iterations employed by OPTIM -C NL,NR = Endpoints of an arc which intersects IN1-IN2 -C with NL LEFT IN1->IN2 -C X0,Y0,Z0 = Coordinates of N0 -C X1,Y1,Z1 = Coordinates of IN1 -C X2,Y2,Z2 = Coordinates of IN2 -C -C -C Store IN1, IN2, and LWK in local variables and test for -C errors. -C - N1 = IN1 - N2 = IN2 - IWEND = LWK - IF (N1 .LT. 1 .OR. N2 .LT. 1 .OR. N1 .EQ. N2 .OR. - . IWEND .LT. 0) GO TO 31 -C -C Test for N2 as a neighbor of N1. LPL points to the last -C neighbor of N1. -C - LPL = LEND(N1) - N0 = ABS(LIST(LPL)) - LP = LPL - 1 IF (N0 .EQ. N2) GO TO 30 - LP = LPTR(LP) - N0 = LIST(LP) - IF (LP .NE. LPL) GO TO 1 -C -C Initialize parameters. -C - IWL = 0 - NIT = 0 -C -C Store the coordinates of N1 and N2. -C - 2 X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - X2 = X(N2) - Y2 = Y(N2) - Z2 = Z(N2) -C -C Set NR and NL to adjacent neighbors of N1 such that -C NR LEFT N2->N1 and NL LEFT N1->N2, -C (NR Forward N1->N2 or NL Forward N1->N2), and -C (NR Forward N2->N1 or NL Forward N2->N1). -C -C Initialization: Set N1FRST and N1LST to the first and -C (signed) last neighbors of N1, respectively, and -C initialize NL to N1FRST. -C - LPL = LEND(N1) - N1LST = LIST(LPL) - LP = LPTR(LPL) - N1FRST = LIST(LP) - NL = N1FRST - IF (N1LST .LT. 0) GO TO 4 -C -C N1 is an interior node. Set NL to the first candidate -C for NR (NL LEFT N2->N1). -C - 3 IF (LEFT(X2,Y2,Z2,X1,Y1,Z1,X(NL),Y(NL),Z(NL))) GO TO 4 - LP = LPTR(LP) - NL = LIST(LP) - IF (NL .NE. N1FRST) GO TO 3 -C -C All neighbors of N1 are strictly left of N1->N2. -C - GO TO 5 -C -C NL = LIST(LP) LEFT N2->N1. Set NR to NL and NL to the -C following neighbor of N1. -C - 4 NR = NL - LP = LPTR(LP) - NL = ABS(LIST(LP)) - IF (LEFT(X1,Y1,Z1,X2,Y2,Z2,X(NL),Y(NL),Z(NL)) ) THEN -C -C NL LEFT N1->N2 and NR LEFT N2->N1. The Forward tests -C are employed to avoid an error associated with -C collinear nodes. -C - DP12 = X1*X2 + Y1*Y2 + Z1*Z2 - DP1L = X1*X(NL) + Y1*Y(NL) + Z1*Z(NL) - DP2L = X2*X(NL) + Y2*Y(NL) + Z2*Z(NL) - DP1R = X1*X(NR) + Y1*Y(NR) + Z1*Z(NR) - DP2R = X2*X(NR) + Y2*Y(NR) + Z2*Z(NR) - IF ( (DP2L-DP12*DP1L .GE. 0. .OR. - . DP2R-DP12*DP1R .GE. 0.) .AND. - . (DP1L-DP12*DP2L .GE. 0. .OR. - . DP1R-DP12*DP2R .GE. 0.) ) GO TO 6 -C -C NL-NR does not intersect N1-N2. However, there is -C another candidate for the first arc if NL lies on -C the line N1-N2. -C - IF ( .NOT. LEFT(X2,Y2,Z2,X1,Y1,Z1,X(NL),Y(NL), - . Z(NL)) ) GO TO 5 - ENDIF -C -C Bottom of loop. -C - IF (NL .NE. 
N1FRST) GO TO 4 -C -C Either the triangulation is invalid or N1-N2 lies on the -C convex hull boundary and an edge NR->NL (opposite N1 and -C intersecting N1-N2) was not found due to floating point -C error. Try interchanging N1 and N2 -- NIT > 0 iff this -C has already been done. -C - 5 IF (NIT .GT. 0) GO TO 33 - NIT = 1 - N1 = N2 - N2 = IN1 - GO TO 2 -C -C Store the ordered sequence of intersecting edges NL->NR in -C IWK(1,IWL)->IWK(2,IWL). -C - 6 IWL = IWL + 1 - IF (IWL .GT. IWEND) GO TO 32 - IWK(1,IWL) = NL - IWK(2,IWL) = NR -C -C Set NEXT to the neighbor of NL which follows NR. -C - LPL = LEND(NL) - LP = LPTR(LPL) -C -C Find NR as a neighbor of NL. The search begins with -C the first neighbor. -C - 7 IF (LIST(LP) .EQ. NR) GO TO 8 - LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 7 -C -C NR must be the last neighbor, and NL->NR cannot be a -C boundary edge. -C - IF (LIST(LP) .NE. NR) GO TO 33 -C -C Set NEXT to the neighbor following NR, and test for -C termination of the store loop. -C - 8 LP = LPTR(LP) - NEXT = ABS(LIST(LP)) - IF (NEXT .EQ. N2) GO TO 9 -C -C Set NL or NR to NEXT. -C - IF ( LEFT(X1,Y1,Z1,X2,Y2,Z2,X(NEXT),Y(NEXT),Z(NEXT)) ) - . THEN - NL = NEXT - ELSE - NR = NEXT - ENDIF - GO TO 6 -C -C IWL is the number of arcs which intersect N1-N2. -C Store LWK. -C - 9 LWK = IWL - IWEND = IWL -C -C Initialize for edge swapping loop -- all possible swaps -C are applied (even if the new arc again intersects -C N1-N2), arcs to the left of N1->N2 are stored in the -C left portion of IWK, and arcs to the right are stored in -C the right portion. IWF and IWL index the first and last -C intersecting arcs. -C - IWF = 1 -C -C Top of loop -- set N0 to N1 and NL->NR to the first edge. -C IWC points to the arc currently being processed. LFT -C .LE. 0 iff N0 LEFT N1->N2. -C - 10 LFT = 0 - N0 = N1 - X0 = X1 - Y0 = Y1 - Z0 = Z1 - NL = IWK(1,IWF) - NR = IWK(2,IWF) - IWC = IWF -C -C Set NEXT to the node opposite NL->NR unless IWC is the -C last arc. -C - 11 IF (IWC .EQ. IWL) GO TO 21 - IWCP1 = IWC + 1 - NEXT = IWK(1,IWCP1) - IF (NEXT .NE. NL) GO TO 16 - NEXT = IWK(2,IWCP1) -C -C NEXT RIGHT N1->N2 and IWC .LT. IWL. Test for a possible -C swap. -C - IF ( .NOT. LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 14 - IF (LFT .GE. 0) GO TO 12 - IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 14 -C -C Replace NL->NR with N0->NEXT. -C - CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = N0 - IWK(2,IWC) = NEXT - GO TO 15 -C -C Swap NL-NR for N0-NEXT, shift columns IWC+1,...,IWL to -C the left, and store N0-NEXT in the right portion of -C IWK. -C - 12 CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - DO 13 I = IWCP1,IWL - IWK(1,I-1) = IWK(1,I) - IWK(2,I-1) = IWK(2,I) - 13 CONTINUE - IWK(1,IWL) = N0 - IWK(2,IWL) = NEXT - IWL = IWL - 1 - NR = NEXT - GO TO 11 -C -C A swap is not possible. Set N0 to NR. -C - 14 N0 = NR - X0 = X(N0) - Y0 = Y(N0) - Z0 = Z(N0) - LFT = 1 -C -C Advance to the next arc. -C - 15 NR = NEXT - IWC = IWC + 1 - GO TO 11 -C -C NEXT LEFT N1->N2, NEXT .NE. N2, and IWC .LT. IWL. -C Test for a possible swap. -C - 16 IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 19 - IF (LFT .LE. 0) GO TO 17 - IF ( .NOT. LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 19 -C -C Replace NL->NR with NEXT->N0. 
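The arcs swapped in this loop were collected earlier with a dot-product intersection test: in terms of the DPij dot products above, a node P is "forward" of N1 toward N2 when <P,N2> - <N1,N2><P,N1> >= 0, and the arc NL-NR is accepted as crossing N1-N2 when at least one endpoint is forward of N1->N2 and at least one is forward of N2->N1 (with NL left of N1->N2 and NR left of N2->N1). An illustrative NumPy restatement (not package code):

    import numpy as np

    def forward(n1, n2, p):
        """True if unit vector p lies 'ahead of' n1 in the direction of n2."""
        return np.dot(p, n2) - np.dot(n1, n2) * np.dot(p, n1) >= 0.0

    def may_intersect(n1, n2, nl, nr):
        """EDGE's screening test for whether arc nl-nr crosses arc n1-n2."""
        return ((forward(n1, n2, nl) or forward(n1, n2, nr)) and
                (forward(n2, n1, nl) or forward(n2, n1, nr)))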
-C - CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = NEXT - IWK(2,IWC) = N0 - GO TO 20 -C -C Swap NL-NR for N0-NEXT, shift columns IWF,...,IWC-1 to -C the right, and store N0-NEXT in the left portion of -C IWK. -C - 17 CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - DO 18 I = IWC-1,IWF,-1 - IWK(1,I+1) = IWK(1,I) - IWK(2,I+1) = IWK(2,I) - 18 CONTINUE - IWK(1,IWF) = N0 - IWK(2,IWF) = NEXT - IWF = IWF + 1 - GO TO 20 -C -C A swap is not possible. Set N0 to NL. -C - 19 N0 = NL - X0 = X(N0) - Y0 = Y(N0) - Z0 = Z(N0) - LFT = -1 -C -C Advance to the next arc. -C - 20 NL = NEXT - IWC = IWC + 1 - GO TO 11 -C -C N2 is opposite NL->NR (IWC = IWL). -C - 21 IF (N0 .EQ. N1) GO TO 24 - IF (LFT .LT. 0) GO TO 22 -C -C N0 RIGHT N1->N2. Test for a possible swap. -C - IF ( .NOT. LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X2,Y2,Z2) ) - . GO TO 10 -C -C Swap NL-NR for N0-N2 and store N0-N2 in the right -C portion of IWK. -C - CALL SWAP (N2,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWL) = N0 - IWK(2,IWL) = N2 - IWL = IWL - 1 - GO TO 10 -C -C N0 LEFT N1->N2. Test for a possible swap. -C - 22 IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X2,Y2,Z2) ) - . GO TO 10 -C -C Swap NL-NR for N0-N2, shift columns IWF,...,IWL-1 to the -C right, and store N0-N2 in the left portion of IWK. -C - CALL SWAP (N2,N0,NL,NR, LIST,LPTR,LEND, LP21) - I = IWL - 23 IWK(1,I) = IWK(1,I-1) - IWK(2,I) = IWK(2,I-1) - I = I - 1 - IF (I .GT. IWF) GO TO 23 - IWK(1,IWF) = N0 - IWK(2,IWF) = N2 - IWF = IWF + 1 - GO TO 10 -C -C IWF = IWC = IWL. Swap out the last arc for N1-N2 and -C store zeros in IWK. -C - 24 CALL SWAP (N2,N1,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = 0 - IWK(2,IWC) = 0 -C -C Optimization procedure -- -C - IER = 0 - IF (IWC .GT. 1) THEN -C -C Optimize the set of new arcs to the left of IN1->IN2. -C - NIT = 4*(IWC-1) - CALL OPTIM (X,Y,Z,IWC-1, LIST,LPTR,LEND,NIT, - . IWK, IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 34 - IF (IERR .EQ. 1) IER = 5 - ENDIF - IF (IWC .LT. IWEND) THEN -C -C Optimize the set of new arcs to the right of IN1->IN2. -C - NIT = 4*(IWEND-IWC) - CALL OPTIM (X,Y,Z,IWEND-IWC, LIST,LPTR,LEND,NIT, - . IWK(1,IWC+1), IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 34 - IF (IERR .EQ. 1) GO TO 35 - ENDIF - IF (IER .EQ. 5) GO TO 35 -C -C Successful termination (IER = 0). -C - RETURN -C -C IN1 and IN2 were adjacent on input. -C - 30 IER = 0 - RETURN -C -C Invalid input parameter. -C - 31 IER = 1 - RETURN -C -C Insufficient space reserved for IWK. -C - 32 IER = 2 - RETURN -C -C Invalid triangulation data structure or collinear nodes -C on convex hull boundary. -C - 33 IER = 3 - WRITE (*,130) IN1, IN2 - 130 FORMAT (//5X,'*** Error in EDGE: Invalid triangula', - . 'tion or null triangles on boundary'/ - . 9X,'IN1 =',I4,', IN2=',I4/) - RETURN -C -C Error flag (other than 1) returned by OPTIM. -C - 34 IER = 4 - WRITE (*,140) NIT, IERR - 140 FORMAT (//5X,'*** Error in OPTIM (called from EDGE):', - . ' NIT = ',I4,', IER = ',I1,' ***'/) - RETURN -C -C Error flag 1 returned by OPTIM. -C - 35 IER = 5 - RETURN - END - SUBROUTINE GETNP (X,Y,Z,LIST,LPTR,LEND,L, NPTS, DF, - . IER) - INTEGER LIST(*), LPTR(*), LEND(*), L, NPTS(L), IER - REAL X(*), Y(*), Z(*), DF -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. 
of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C Given a Delaunay triangulation of N nodes on the unit -C sphere and an array NPTS containing the indexes of L-1 -C nodes ordered by angular distance from NPTS(1), this sub- -C routine sets NPTS(L) to the index of the next node in the -C sequence -- the node, other than NPTS(1),...,NPTS(L-1), -C that is closest to NPTS(1). Thus, the ordered sequence -C of K closest nodes to N1 (including N1) may be determined -C by K-1 calls to GETNP with NPTS(1) = N1 and L = 2,3,...,K -C for K .GE. 2. -C -C The algorithm uses the property of a Delaunay triangula- -C tion that the K-th closest node to N1 is a neighbor of one -C of the K-1 closest nodes to N1. -C -C -C On input: -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C LIST,LPTR,LEND = Triangulation data structure. Re- -C fer to Subroutine TRMESH. -C -C L = Number of nodes in the sequence on output. 2 -C .LE. L .LE. N. -C -C The above parameters are not altered by this routine. -C -C NPTS = Array of length .GE. L containing the indexes -C of the L-1 closest nodes to NPTS(1) in the -C first L-1 locations. -C -C On output: -C -C NPTS = Array updated with the index of the L-th -C closest node to NPTS(1) in position L unless -C IER = 1. -C -C DF = Value of an increasing function (negative cos- -C ine) of the angular distance between NPTS(1) -C and NPTS(L) unless IER = 1. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if L < 2. -C -C Modules required by GETNP: None -C -C Intrinsic function called by GETNP: ABS -C -C*********************************************************** -C - INTEGER I, LM1, LP, LPL, N1, NB, NI, NP - REAL DNB, DNP, X1, Y1, Z1 -C -C Local parameters: -C -C DNB,DNP = Negative cosines of the angular distances from -C N1 to NB and to NP, respectively -C I = NPTS index and DO-loop index -C LM1 = L-1 -C LP = LIST pointer of a neighbor of NI -C LPL = Pointer to the last neighbor of NI -C N1 = NPTS(1) -C NB = Neighbor of NI and candidate for NP -C NI = NPTS(I) -C NP = Candidate for NPTS(L) -C X1,Y1,Z1 = Coordinates of N1 -C - LM1 = L - 1 - IF (LM1 .LT. 1) GO TO 6 - IER = 0 -C -C Store N1 = NPTS(1) and mark the elements of NPTS. -C - N1 = NPTS(1) - X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - DO 1 I = 1,LM1 - NI = NPTS(I) - LEND(NI) = -LEND(NI) - 1 CONTINUE -C -C Candidates for NP = NPTS(L) are the unmarked neighbors -C of nodes in NPTS. DNP is initially greater than -cos(PI) -C (the maximum distance). -C - DNP = 2. -C -C Loop on nodes NI in NPTS. -C - DO 4 I = 1,LM1 - NI = NPTS(I) - LPL = -LEND(NI) - LP = LPL -C -C Loop on neighbors NB of NI. -C - 2 NB = ABS(LIST(LP)) - IF (LEND(NB) .LT. 0) GO TO 3 -C -C NB is an unmarked neighbor of NI. Replace NP if NB is -C closer to N1. -C - DNB = -(X(NB)*X1 + Y(NB)*Y1 + Z(NB)*Z1) - IF (DNB .GE. DNP) GO TO 3 - NP = NB - DNP = DNB - 3 LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 2 - 4 CONTINUE - NPTS(L) = NP - DF = DNP -C -C Unmark the elements of NPTS. -C - DO 5 I = 1,LM1 - NI = NPTS(I) - LEND(NI) = -LEND(NI) - 5 CONTINUE - RETURN -C -C L is outside its valid range. -C - 6 IER = 1 - RETURN - END - SUBROUTINE INSERT (K,LP, LIST,LPTR,LNEW ) - INTEGER K, LP, LIST(*), LPTR(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine inserts K as a neighbor of N1 following -C N2, where LP is the LIST pointer of N2 as a neighbor of -C N1. 
Note that, if N2 is the last neighbor of N1, K will -C become the first neighbor (even if N1 is a boundary node). -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C K = Index of the node to be inserted. -C -C LP = LIST pointer of N2 as a neighbor of N1. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LNEW = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C On output: -C -C LIST,LPTR,LNEW = Data structure updated with the -C addition of node K. -C -C Modules required by INSERT: None -C -C*********************************************************** -C - INTEGER LSAV -C - LSAV = LPTR(LP) - LPTR(LP) = LNEW - LIST(LNEW) = K - LPTR(LNEW) = LSAV - LNEW = LNEW + 1 - RETURN - END - LOGICAL FUNCTION INSIDE (P,LV,XV,YV,ZV,NV,LISTV, IER) - INTEGER LV, NV, LISTV(NV), IER - REAL P(3), XV(LV), YV(LV), ZV(LV) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 12/27/93 -C -C This function locates a point P relative to a polygonal -C region R on the surface of the unit sphere, returning -C INSIDE = TRUE if and only if P is contained in R. R is -C defined by a cyclically ordered sequence of vertices which -C form a positively-oriented simple closed curve. Adjacent -C vertices need not be distinct but the curve must not be -C self-intersecting. Also, while polygon edges are by defi- -C nition restricted to a single hemisphere, R is not so -C restricted. Its interior is the region to the left as the -C vertices are traversed in order. -C -C The algorithm consists of selecting a point Q in R and -C then finding all points at which the great circle defined -C by P and Q intersects the boundary of R. P lies inside R -C if and only if there is an even number of intersection -C points between Q and P. Q is taken to be a point immedi- -C ately to the left of a directed boundary edge -- the first -C one that results in no consistency-check failures. -C -C If P is close to the polygon boundary, the problem is -C ill-conditioned and the decision may be incorrect. Also, -C an incorrect decision may result from a poor choice of Q -C (if, for example, a boundary edge lies on the great cir- -C cle defined by P and Q). A more reliable result could be -C obtained by a sequence of calls to INSIDE with the ver- -C tices cyclically permuted before each call (to alter the -C choice of Q). -C -C -C On input: -C -C P = Array of length 3 containing the Cartesian -C coordinates of the point (unit vector) to be -C located. -C -C LV = Length of arrays XV, YV, and ZV. -C -C XV,YV,ZV = Arrays of length LV containing the Carte- -C sian coordinates of unit vectors (points -C on the unit sphere). These values are -C not tested for validity. -C -C NV = Number of vertices in the polygon. 3 .LE. NV -C .LE. LV. -C -C LISTV = Array of length NV containing the indexes -C (for XV, YV, and ZV) of a cyclically-ordered -C (and CCW-ordered) sequence of vertices that -C define R. The last vertex (indexed by -C LISTV(NV)) is followed by the first (indexed -C by LISTV(1)). LISTV entries must be in the -C range 1 to LV. -C -C Input parameters are not altered by this function. -C -C On output: -C -C INSIDE = TRUE if and only if P lies inside R unless -C IER .NE. 0, in which case the value is not -C altered. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. 
-C IER = 1 if LV or NV is outside its valid -C range. -C IER = 2 if a LISTV entry is outside its valid -C range. -C IER = 3 if the polygon boundary was found to -C be self-intersecting. This error will -C not necessarily be detected. -C IER = 4 if every choice of Q (one for each -C boundary edge) led to failure of some -C internal consistency check. The most -C likely cause of this error is invalid -C input: P = (0,0,0), a null or self- -C intersecting polygon, etc. -C -C Module required by INSIDE: INTRSC -C -C Intrinsic function called by INSIDE: SQRT -C -C*********************************************************** -C - INTEGER I1, I2, IERR, IMX, K, K0, N, NI - LOGICAL EVEN, LFT1, LFT2, PINR, QINR - REAL B(3), BP, BQ, CN(3), D, EPS, PN(3), Q(3), - . QN(3), QNRM, V1(3), V2(3), VN(3), VNRM -C -C Local parameters: -C -C B = Intersection point between the boundary and -C the great circle defined by P and Q -C BP,BQ = and , respectively, maximized over -C intersection points B that lie between P and -C Q (on the shorter arc) -- used to find the -C closest intersection points to P and Q -C CN = Q X P = normal to the plane of P and Q -C D = Dot product or -C EPS = Parameter used to define Q as the point whose -C orthogonal distance to (the midpoint of) -C boundary edge V1->V2 is approximately EPS/ -C (2*Cos(A/2)), where = Cos(A). -C EVEN = TRUE iff an even number of intersection points -C lie between P and Q (on the shorter arc) -C I1,I2 = Indexes (LISTV elements) of a pair of adjacent -C boundary vertices (endpoints of a boundary -C edge) -C IERR = Error flag for calls to INTRSC (not tested) -C IMX = Local copy of LV and maximum value of I1 and -C I2 -C K = DO-loop index and LISTV index -C K0 = LISTV index of the first endpoint of the -C boundary edge used to compute Q -C LFT1,LFT2 = Logical variables associated with I1 and I2 in -C the boundary traversal: TRUE iff the vertex -C is strictly to the left of Q->P ( > 0) -C N = Local copy of NV -C NI = Number of intersections (between the boundary -C curve and the great circle P-Q) encountered -C PINR = TRUE iff P is to the left of the directed -C boundary edge associated with the closest -C intersection point to P that lies between P -C and Q (a left-to-right intersection as -C viewed from Q), or there is no intersection -C between P and Q (on the shorter arc) -C PN,QN = P X CN and CN X Q, respectively: used to -C locate intersections B relative to arc Q->P -C Q = (V1 + V2 + EPS*VN/VNRM)/QNRM, where V1->V2 is -C the boundary edge indexed by LISTV(K0) -> -C LISTV(K0+1) -C QINR = TRUE iff Q is to the left of the directed -C boundary edge associated with the closest -C intersection point to Q that lies between P -C and Q (a right-to-left intersection as -C viewed from Q), or there is no intersection -C between P and Q (on the shorter arc) -C QNRM = Euclidean norm of V1+V2+EPS*VN/VNRM used to -C compute (normalize) Q -C V1,V2 = Vertices indexed by I1 and I2 in the boundary -C traversal -C VN = V1 X V2, where V1->V2 is the boundary edge -C indexed by LISTV(K0) -> LISTV(K0+1) -C VNRM = Euclidean norm of VN -C - DATA EPS/1.E-3/ -C -C Store local parameters, test for error 1, and initialize -C K0. -C - IMX = LV - N = NV - IF (N .LT. 3 .OR. N .GT. IMX) GO TO 11 - K0 = 0 - I1 = LISTV(1) - IF (I1 .LT. 1 .OR. I1 .GT. IMX) GO TO 12 -C -C Increment K0 and set Q to a point immediately to the left -C of the midpoint of edge V1->V2 = LISTV(K0)->LISTV(K0+1): -C Q = (V1 + V2 + EPS*VN/VNRM)/QNRM, where VN = V1 X V2. -C - 1 K0 = K0 + 1 - IF (K0 .GT. 
N) GO TO 14 - I1 = LISTV(K0) - IF (K0 .LT. N) THEN - I2 = LISTV(K0+1) - ELSE - I2 = LISTV(1) - ENDIF - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - VN(1) = YV(I1)*ZV(I2) - ZV(I1)*YV(I2) - VN(2) = ZV(I1)*XV(I2) - XV(I1)*ZV(I2) - VN(3) = XV(I1)*YV(I2) - YV(I1)*XV(I2) - VNRM = SQRT(VN(1)*VN(1) + VN(2)*VN(2) + VN(3)*VN(3)) - IF (VNRM .EQ. 0.) GO TO 1 - Q(1) = XV(I1) + XV(I2) + EPS*VN(1)/VNRM - Q(2) = YV(I1) + YV(I2) + EPS*VN(2)/VNRM - Q(3) = ZV(I1) + ZV(I2) + EPS*VN(3)/VNRM - QNRM = SQRT(Q(1)*Q(1) + Q(2)*Q(2) + Q(3)*Q(3)) - Q(1) = Q(1)/QNRM - Q(2) = Q(2)/QNRM - Q(3) = Q(3)/QNRM -C -C Compute CN = Q X P, PN = P X CN, and QN = CN X Q. -C - CN(1) = Q(2)*P(3) - Q(3)*P(2) - CN(2) = Q(3)*P(1) - Q(1)*P(3) - CN(3) = Q(1)*P(2) - Q(2)*P(1) - IF (CN(1) .EQ. 0. .AND. CN(2) .EQ. 0. .AND. - . CN(3) .EQ. 0.) GO TO 1 - PN(1) = P(2)*CN(3) - P(3)*CN(2) - PN(2) = P(3)*CN(1) - P(1)*CN(3) - PN(3) = P(1)*CN(2) - P(2)*CN(1) - QN(1) = CN(2)*Q(3) - CN(3)*Q(2) - QN(2) = CN(3)*Q(1) - CN(1)*Q(3) - QN(3) = CN(1)*Q(2) - CN(2)*Q(1) -C -C Initialize parameters for the boundary traversal. -C - NI = 0 - EVEN = .TRUE. - BP = -2. - BQ = -2. - PINR = .TRUE. - QINR = .TRUE. - I2 = LISTV(N) - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - LFT2 = CN(1)*XV(I2) + CN(2)*YV(I2) + - . CN(3)*ZV(I2) .GT. 0. -C -C Loop on boundary arcs I1->I2. -C - DO 2 K = 1,N - I1 = I2 - LFT1 = LFT2 - I2 = LISTV(K) - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - LFT2 = CN(1)*XV(I2) + CN(2)*YV(I2) + - . CN(3)*ZV(I2) .GT. 0. - IF (LFT1 .EQV. LFT2) GO TO 2 -C -C I1 and I2 are on opposite sides of Q->P. Compute the -C point of intersection B. -C - NI = NI + 1 - V1(1) = XV(I1) - V1(2) = YV(I1) - V1(3) = ZV(I1) - V2(1) = XV(I2) - V2(2) = YV(I2) - V2(3) = ZV(I2) - CALL INTRSC (V1,V2,CN, B,IERR) -C -C B is between Q and P (on the shorter arc) iff -C B Forward Q->P and B Forward P->Q iff -C > 0 and > 0. -C - IF (B(1)*QN(1) + B(2)*QN(2) + B(3)*QN(3) .GT. 0. - . .AND. - . B(1)*PN(1) + B(2)*PN(2) + B(3)*PN(3) .GT. 0.) - . THEN -C -C Update EVEN, BQ, QINR, BP, and PINR. -C - EVEN = .NOT. EVEN - D = B(1)*Q(1) + B(2)*Q(2) + B(3)*Q(3) - IF (D .GT. BQ) THEN - BQ = D - QINR = LFT2 - ENDIF - D = B(1)*P(1) + B(2)*P(2) + B(3)*P(3) - IF (D .GT. BP) THEN - BP = D - PINR = LFT1 - ENDIF - ENDIF - 2 CONTINUE -C -C Test for consistency: NI must be even and QINR must be -C TRUE. -C - IF (NI .NE. 2*(NI/2) .OR. .NOT. QINR) GO TO 1 -C -C Test for error 3: different values of PINR and EVEN. -C - IF (PINR .NEQV. EVEN) GO TO 13 -C -C No error encountered. -C - IER = 0 - INSIDE = EVEN - RETURN -C -C LV or NV is outside its valid range. -C - 11 IER = 1 - RETURN -C -C A LISTV entry is outside its valid range. -C - 12 IER = 2 - RETURN -C -C The polygon boundary is self-intersecting. -C - 13 IER = 3 - RETURN -C -C Consistency tests failed for all values of Q. -C - 14 IER = 4 - RETURN - END - SUBROUTINE INTADD (KK,I1,I2,I3, LIST,LPTR,LEND,LNEW ) - INTEGER KK, I1, I2, I3, LIST(*), LPTR(*), LEND(*), - . LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine adds an interior node to a triangulation -C of a set of points on the unit sphere. The data structure -C is updated with the insertion of node KK into the triangle -C whose vertices are I1, I2, and I3. No optimization of the -C triangulation is performed. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C KK = Index of the node to be inserted. 
KK .GE. 1 -C and KK must not be equal to I1, I2, or I3. -C -C I1,I2,I3 = Indexes of the counterclockwise-ordered -C sequence of vertices of a triangle which -C contains node KK. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. Refer to Sub- -C routine TRMESH. Triangle -C (I1,I2,I3) must be included -C in the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK. KK -C will be connected to nodes I1, -C I2, and I3. -C -C Modules required by INTADD: INSERT, LSTPTR -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER K, LP, N1, N2, N3 -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C N1,N2,N3 = Local copies of I1, I2, and I3 -C - K = KK -C -C Initialization. -C - N1 = I1 - N2 = I2 - N3 = I3 -C -C Add K as a neighbor of I1, I2, and I3. -C - LP = LSTPTR(LEND(N1),N2,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - LP = LSTPTR(LEND(N2),N3,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - LP = LSTPTR(LEND(N3),N1,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) -C -C Add I1, I2, and I3 as neighbors of K. -C - LIST(LNEW) = N1 - LIST(LNEW+1) = N2 - LIST(LNEW+2) = N3 - LPTR(LNEW) = LNEW + 1 - LPTR(LNEW+1) = LNEW + 2 - LPTR(LNEW+2) = LNEW - LEND(K) = LNEW + 2 - LNEW = LNEW + 3 - RETURN - END - SUBROUTINE INTRSC (P1,P2,CN, P,IER) - INTEGER IER - REAL P1(3), P2(3), CN(3), P(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/19/90 -C -C Given a great circle C and points P1 and P2 defining an -C arc A on the surface of the unit sphere, where A is the -C shorter of the two portions of the great circle C12 assoc- -C iated with P1 and P2, this subroutine returns the point -C of intersection P between C and C12 that is closer to A. -C Thus, if P1 and P2 lie in opposite hemispheres defined by -C C, P is the point of intersection of C with A. -C -C -C On input: -C -C P1,P2 = Arrays of length 3 containing the Cartesian -C coordinates of unit vectors. -C -C CN = Array of length 3 containing the Cartesian -C coordinates of a nonzero vector which defines C -C as the intersection of the plane whose normal -C is CN with the unit sphere. Thus, if C is to -C be the great circle defined by P and Q, CN -C should be P X Q. -C -C The above parameters are not altered by this routine. -C -C P = Array of length 3. -C -C On output: -C -C P = Point of intersection defined above unless IER -C .NE. 0, in which case P is not altered. -C -C IER = Error indicator. -C IER = 0 if no errors were encountered. -C IER = 1 if = . This occurs -C iff P1 = P2 or CN = 0 or there are -C two intersection points at the same -C distance from A. -C IER = 2 if P2 = -P1 and the definition of A is -C therefore ambiguous. -C -C Modules required by INTRSC: None -C -C Intrinsic function called by INTRSC: SQRT -C -C*********************************************************** -C - INTEGER I - REAL D1, D2, PP(3), PPN, T -C -C Local parameters: -C -C D1 = -C D2 = -C I = DO-loop index -C PP = P1 + T*(P2-P1) = Parametric representation of the -C line defined by P1 and P2 -C PPN = Norm of PP -C T = D1/(D1-D2) = Parameter value chosen so that PP lies -C in the plane of C -C - D1 = CN(1)*P1(1) + CN(2)*P1(2) + CN(3)*P1(3) - D2 = CN(1)*P2(1) + CN(2)*P2(2) + CN(3)*P2(3) -C - IF (D1 .EQ. 
D2) THEN - IER = 1 - RETURN - ENDIF -C -C Solve for T such that = 0 and compute PP and PPN. -C - T = D1/(D1-D2) - PPN = 0. - DO 1 I = 1,3 - PP(I) = P1(I) + T*(P2(I)-P1(I)) - PPN = PPN + PP(I)*PP(I) - 1 CONTINUE -C -C PPN = 0 iff PP = 0 iff P2 = -P1 (and T = .5). -C - IF (PPN .EQ. 0.) THEN - IER = 2 - RETURN - ENDIF - PPN = SQRT(PPN) -C -C Compute P = PP/PPN. -C - DO 2 I = 1,3 - P(I) = PP(I)/PPN - 2 CONTINUE - IER = 0 - RETURN - END - INTEGER FUNCTION JRAND (N, IX,IY,IZ ) - INTEGER N, IX, IY, IZ -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C This function returns a uniformly distributed pseudo- -C random integer in the range 1 to N. -C -C -C On input: -C -C N = Maximum value to be returned. -C -C N is not altered by this function. -C -C IX,IY,IZ = Integer seeds initialized to values in -C the range 1 to 30,000 before the first -C call to JRAND, and not altered between -C subsequent calls (unless a sequence of -C random numbers is to be repeated by -C reinitializing the seeds). -C -C On output: -C -C IX,IY,IZ = Updated integer seeds. -C -C JRAND = Random integer in the range 1 to N. -C -C Reference: B. A. Wichmann and I. D. Hill, "An Efficient -C and Portable Pseudo-random Number Generator", -C Applied Statistics, Vol. 31, No. 2, 1982, -C pp. 188-190. -C -C Modules required by JRAND: None -C -C Intrinsic functions called by JRAND: INT, MOD, REAL -C -C*********************************************************** -C - REAL U, X -C -C Local parameters: -C -C U = Pseudo-random number uniformly distributed in the -C interval (0,1). -C X = Pseudo-random number in the range 0 to 3 whose frac- -C tional part is U. -C - IX = MOD(171*IX,30269) - IY = MOD(172*IY,30307) - IZ = MOD(170*IZ,30323) - X = (REAL(IX)/30269.) + (REAL(IY)/30307.) + - . (REAL(IZ)/30323.) - U = X - INT(X) - JRAND = REAL(N)*U + 1. - RETURN - END - LOGICAL FUNCTION LEFT (X1,Y1,Z1,X2,Y2,Z2,X0,Y0,Z0) - REAL X1, Y1, Z1, X2, Y2, Z2, X0, Y0, Z0 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function determines whether node N0 is in the -C (closed) left hemisphere defined by the plane containing -C N1, N2, and the origin, where left is defined relative to -C an observer at N1 facing N2. -C -C -C On input: -C -C X1,Y1,Z1 = Coordinates of N1. -C -C X2,Y2,Z2 = Coordinates of N2. -C -C X0,Y0,Z0 = Coordinates of N0. -C -C Input parameters are not altered by this function. -C -C On output: -C -C LEFT = TRUE if and only if N0 is in the closed -C left hemisphere. -C -C Modules required by LEFT: None -C -C*********************************************************** -C -C LEFT = TRUE iff = det(N0,N1,N2) .GE. 0. -C - LEFT = X0*(Y1*Z2-Y2*Z1) - Y0*(X1*Z2-X2*Z1) + - . Z0*(X1*Y2-X2*Y1) .GE. 0. - RETURN - END - INTEGER FUNCTION LSTPTR (LPL,NB,LIST,LPTR) - INTEGER LPL, NB, LIST(*), LPTR(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function returns the index (LIST pointer) of NB in -C the adjacency list for N0, where LPL = LEND(N0). -C -C This function is identical to the similarly named -C function in TRIPACK. 
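JRAND above is the Wichmann-Hill combined congruential generator, carried in three small integer seeds and reduced to an integer in 1..N. A direct Python transcription of the same recurrence (a sketch only; in the Fortran the seeds live in caller-owned variables):

    def jrand(n, seeds):
        # Wichmann-Hill update of the three seeds, as in JRAND.
        seeds[0] = (171 * seeds[0]) % 30269
        seeds[1] = (172 * seeds[1]) % 30307
        seeds[2] = (170 * seeds[2]) % 30323
        x = seeds[0] / 30269.0 + seeds[1] / 30307.0 + seeds[2] / 30323.0
        u = x - int(x)            # fractional part, uniform in (0, 1)
        return int(n * u) + 1     # pseudo-random integer in 1..n

    seeds = [1, 2, 3]             # seeds should start in the range 1 to 30,000
    sample = [jrand(10, seeds) for _ in range(5)]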
-C -C -C On input: -C -C LPL = LEND(N0) -C -C NB = Index of the node whose pointer is to be re- -C turned. NB must be connected to N0. -C -C LIST,LPTR = Data structure defining the triangula- -C tion. Refer to Subroutine TRMESH. -C -C Input parameters are not altered by this function. -C -C On output: -C -C LSTPTR = Pointer such that LIST(LSTPTR) = NB or -C LIST(LSTPTR) = -NB, unless NB is not a -C neighbor of N0, in which case LSTPTR = LPL. -C -C Modules required by LSTPTR: None -C -C*********************************************************** -C - INTEGER LP, ND -C -C Local parameters: -C -C LP = LIST pointer -C ND = Nodal index -C - LP = LPTR(LPL) - 1 ND = LIST(LP) - IF (ND .EQ. NB) GO TO 2 - LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 1 -C - 2 LSTPTR = LP - RETURN - END - INTEGER FUNCTION NBCNT (LPL,LPTR) - INTEGER LPL, LPTR(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function returns the number of neighbors of a node -C N0 in a triangulation created by Subroutine TRMESH. -C -C This function is identical to the similarly named -C function in TRIPACK. -C -C -C On input: -C -C LPL = LIST pointer to the last neighbor of N0 -- -C LPL = LEND(N0). -C -C LPTR = Array of pointers associated with LIST. -C -C Input parameters are not altered by this function. -C -C On output: -C -C NBCNT = Number of neighbors of N0. -C -C Modules required by NBCNT: None -C -C*********************************************************** -C - INTEGER K, LP -C -C Local parameters: -C -C K = Counter for computing the number of neighbors -C LP = LIST pointer -C - LP = LPL - K = 1 -C - 1 LP = LPTR(LP) - IF (LP .EQ. LPL) GO TO 2 - K = K + 1 - GO TO 1 -C - 2 NBCNT = K - RETURN - END - INTEGER FUNCTION NEARND (P,IST,N,X,Y,Z,LIST,LPTR, - . LEND, AL) - INTEGER IST, N, LIST(*), LPTR(*), LEND(N) - REAL P(3), X(N), Y(N), Z(N), AL -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C Given a point P on the surface of the unit sphere and a -C Delaunay triangulation created by Subroutine TRMESH, this -C function returns the index of the nearest triangulation -C node to P. -C -C The algorithm consists of implicitly adding P to the -C triangulation, finding the nearest neighbor to P, and -C implicitly deleting P from the triangulation. Thus, it -C is based on the fact that, if P is a node in a Delaunay -C triangulation, the nearest node to P is a neighbor of P. -C -C -C On input: -C -C P = Array of length 3 containing the Cartesian coor- -C dinates of the point P to be located relative to -C the triangulation. It is assumed without a test -C that P(1)**2 + P(2)**2 + P(3)**2 = 1. -C -C IST = Index of a node at which TRFIND begins the -C search. Search time depends on the proximity -C of this node to P. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to TRMESH. -C -C Input parameters are not altered by this function. -C -C On output: -C -C NEARND = Nodal index of the nearest node to P, or 0 -C if N < 3 or the triangulation data struc- -C ture is invalid. -C -C AL = Arc length (angular distance in radians) be- -C tween P and NEARND unless NEARND = 0. 
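LSTPTR and NBCNT above both walk the circular pointer chain that stores a node's neighbors: start from LPL = LEND(N0) and follow LPTR until the starting pointer comes around again. A small Python sketch of that traversal (the dict-based lptr stands in for the LPTR array and is purely illustrative):

    def count_neighbors(lpl, lptr):
        # Walk the circular LPTR chain starting at LEND(N0) = lpl,
        # counting entries until the chain closes, as NBCNT does.
        lp = lpl
        count = 1
        while True:
            lp = lptr[lp]
            if lp == lpl:
                return count
            count += 1

    # Three LIST positions 10 -> 11 -> 12 -> 10 describe a node with 3 neighbors.
    print(count_neighbors(10, {10: 11, 11: 12, 12: 10}))  # 3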
-C -C Note that the number of candidates for NEARND -C (neighbors of P) is limited to LMAX defined in -C the PARAMETER statement below. -C -C Modules required by NEARND: JRAND, LSTPTR, TRFIND, STORE -C -C Intrinsic functions called by NEARND: ABS, ACOS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER LMAX - PARAMETER (LMAX=25) - INTEGER I1, I2, I3, L, LISTP(LMAX), LP, LP1, LP2, - . LPL, LPTRP(LMAX), N1, N2, N3, NN, NR, NST - REAL B1, B2, B3, DS1, DSR, DX1, DX2, DX3, DY1, - . DY2, DY3, DZ1, DZ2, DZ3 -C -C Local parameters: -C -C B1,B2,B3 = Unnormalized barycentric coordinates returned -C by TRFIND -C DS1 = (Negative cosine of the) distance from P to N1 -C DSR = (Negative cosine of the) distance from P to NR -C DX1,..DZ3 = Components of vectors used by the swap test -C I1,I2,I3 = Nodal indexes of a triangle containing P, or -C the rightmost (I1) and leftmost (I2) visible -C boundary nodes as viewed from P -C L = Length of LISTP/LPTRP and number of neighbors -C of P -C LMAX = Maximum value of L -C LISTP = Indexes of the neighbors of P -C LPTRP = Array of pointers in 1-1 correspondence with -C LISTP elements -C LP = LIST pointer to a neighbor of N1 and LISTP -C pointer -C LP1,LP2 = LISTP indexes (pointers) -C LPL = Pointer to the last neighbor of N1 -C N1 = Index of a node visible from P -C N2 = Index of an endpoint of an arc opposite P -C N3 = Index of the node opposite N1->N2 -C NN = Local copy of N -C NR = Index of a candidate for the nearest node to P -C NST = Index of the node at which TRFIND begins the -C search -C -C -C Store local parameters and test for N invalid. -C - NN = N - IF (NN .LT. 3) GO TO 6 - NST = IST - IF (NST .LT. 1 .OR. NST .GT. NN) NST = 1 -C -C Find a triangle (I1,I2,I3) containing P, or the rightmost -C (I1) and leftmost (I2) visible boundary nodes as viewed -C from P. -C - CALL TRFIND (NST,P,N,X,Y,Z,LIST,LPTR,LEND, B1,B2,B3, - . I1,I2,I3) -C -C Test for collinear nodes. -C - IF (I1 .EQ. 0) GO TO 6 -C -C Store the linked list of 'neighbors' of P in LISTP and -C LPTRP. I1 is the first neighbor, and 0 is stored as -C the last neighbor if P is not contained in a triangle. -C L is the length of LISTP and LPTRP, and is limited to -C LMAX. -C - IF (I3 .NE. 0) THEN - LISTP(1) = I1 - LPTRP(1) = 2 - LISTP(2) = I2 - LPTRP(2) = 3 - LISTP(3) = I3 - LPTRP(3) = 1 - L = 3 - ELSE - N1 = I1 - L = 1 - LP1 = 2 - LISTP(L) = N1 - LPTRP(L) = LP1 -C -C Loop on the ordered sequence of visible boundary nodes -C N1 from I1 to I2. -C - 1 LPL = LEND(N1) - N1 = -LIST(LPL) - L = LP1 - LP1 = L+1 - LISTP(L) = N1 - LPTRP(L) = LP1 - IF (N1 .NE. I2 .AND. LP1 .LT. LMAX) GO TO 1 - L = LP1 - LISTP(L) = 0 - LPTRP(L) = 1 - ENDIF -C -C Initialize variables for a loop on arcs N1-N2 opposite P -C in which new 'neighbors' are 'swapped' in. N1 follows -C N2 as a neighbor of P, and LP1 and LP2 are the LISTP -C indexes of N1 and N2. -C - LP2 = 1 - N2 = I1 - LP1 = LPTRP(1) - N1 = LISTP(LP1) -C -C Begin loop: find the node N3 opposite N1->N2. -C - 2 LP = LSTPTR(LEND(N1),N2,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 3 - LP = LPTR(LP) - N3 = ABS(LIST(LP)) -C -C Swap test: Exit the loop if L = LMAX. -C - IF (L .EQ. LMAX) GO TO 4 - DX1 = X(N1) - P(1) - DY1 = Y(N1) - P(2) - DZ1 = Z(N1) - P(3) -C - DX2 = X(N2) - P(1) - DY2 = Y(N2) - P(2) - DZ2 = Z(N2) - P(3) -C - DX3 = X(N3) - P(1) - DY3 = Y(N3) - P(2) - DZ3 = Z(N3) - P(3) - IF ( DX3*(DY2*DZ1 - DY1*DZ2) - - . DY3*(DX2*DZ1 - DX1*DZ2) + - . DZ3*(DX2*DY1 - DX1*DY2) .LE. 0. 
) GO TO 3 -C -C Swap: Insert N3 following N2 in the adjacency list for P. -C The two new arcs opposite P must be tested. -C - L = L+1 - LPTRP(LP2) = L - LISTP(L) = N3 - LPTRP(L) = LP1 - LP1 = L - N1 = N3 - GO TO 2 -C -C No swap: Advance to the next arc and test for termination -C on N1 = I1 (LP1 = 1) or N1 followed by 0. -C - 3 IF (LP1 .EQ. 1) GO TO 4 - LP2 = LP1 - N2 = N1 - LP1 = LPTRP(LP1) - N1 = LISTP(LP1) - IF (N1 .EQ. 0) GO TO 4 - GO TO 2 -C -C Set NR and DSR to the index of the nearest node to P and -C an increasing function (negative cosine) of its distance -C from P, respectively. -C - 4 NR = I1 - DSR = -(X(NR)*P(1) + Y(NR)*P(2) + Z(NR)*P(3)) - DO 5 LP = 2,L - N1 = LISTP(LP) - IF (N1 .EQ. 0) GO TO 5 - DS1 = -(X(N1)*P(1) + Y(N1)*P(2) + Z(N1)*P(3)) - IF (DS1 .LT. DSR) THEN - NR = N1 - DSR = DS1 - ENDIF - 5 CONTINUE - DSR = -DSR - IF (DSR .GT. 1.0) DSR = 1.0 - AL = ACOS(DSR) - NEARND = NR - RETURN -C -C Invalid input. -C - 6 NEARND = 0 - RETURN - END - SUBROUTINE OPTIM (X,Y,Z,NA, LIST,LPTR,LEND,NIT, - . IWK, IER) - INTEGER NA, LIST(*), LPTR(*), LEND(*), NIT, IWK(2,NA), - . IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/30/98 -C -C Given a set of NA triangulation arcs, this subroutine -C optimizes the portion of the triangulation consisting of -C the quadrilaterals (pairs of adjacent triangles) which -C have the arcs as diagonals by applying the circumcircle -C test and appropriate swaps to the arcs. -C -C An iteration consists of applying the swap test and -C swaps to all NA arcs in the order in which they are -C stored. The iteration is repeated until no swap occurs -C or NIT iterations have been performed. The bound on the -C number of iterations may be necessary to prevent an -C infinite loop caused by cycling (reversing the effect of a -C previous swap) due to floating point inaccuracy when four -C or more nodes are nearly cocircular. -C -C -C On input: -C -C X,Y,Z = Arrays containing the nodal coordinates. -C -C NA = Number of arcs in the set. NA .GE. 0. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C NIT = Maximum number of iterations to be performed. -C NIT = 4*NA should be sufficient. NIT .GE. 1. -C -C IWK = Integer array dimensioned 2 by NA containing -C the nodal indexes of the arc endpoints (pairs -C of endpoints are stored in columns). -C -C On output: -C -C LIST,LPTR,LEND = Updated triangulation data struc- -C ture reflecting the swaps. -C -C NIT = Number of iterations performed. -C -C IWK = Endpoint indexes of the new set of arcs -C reflecting the swaps. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if a swap occurred on the last of -C MAXIT iterations, where MAXIT is the -C value of NIT on input. The new set -C of arcs is not necessarily optimal -C in this case. -C IER = 2 if NA < 0 or NIT < 1 on input. -C IER = 3 if IWK(2,I) is not a neighbor of -C IWK(1,I) for some I in the range 1 -C to NA. A swap may have occurred in -C this case. -C IER = 4 if a zero pointer was returned by -C Subroutine SWAP. -C -C Modules required by OPTIM: LSTPTR, SWAP, SWPTST -C -C Intrinsic function called by OPTIM: ABS -C -C*********************************************************** -C - INTEGER I, IO1, IO2, ITER, LP, LP21, LPL, LPP, MAXIT, - . 
N1, N2, NNA - LOGICAL SWPTST - LOGICAL SWP -C -C Local parameters: -C -C I = Column index for IWK -C IO1,IO2 = Nodal indexes of the endpoints of an arc in IWK -C ITER = Iteration count -C LP = LIST pointer -C LP21 = Parameter returned by SWAP (not used) -C LPL = Pointer to the last neighbor of IO1 -C LPP = Pointer to the node preceding IO2 as a neighbor -C of IO1 -C MAXIT = Input value of NIT -C N1,N2 = Nodes opposite IO1->IO2 and IO2->IO1, -C respectively -C NNA = Local copy of NA -C SWP = Flag set to TRUE iff a swap occurs in the -C optimization loop -C - NNA = NA - MAXIT = NIT - IF (NNA .LT. 0 .OR. MAXIT .LT. 1) GO TO 7 -C -C Initialize iteration count ITER and test for NA = 0. -C - ITER = 0 - IF (NNA .EQ. 0) GO TO 5 -C -C Top of loop -- -C SWP = TRUE iff a swap occurred in the current iteration. -C - 1 IF (ITER .EQ. MAXIT) GO TO 6 - ITER = ITER + 1 - SWP = .FALSE. -C -C Inner loop on arcs IO1-IO2 -- -C - DO 4 I = 1,NNA - IO1 = IWK(1,I) - IO2 = IWK(2,I) -C -C Set N1 and N2 to the nodes opposite IO1->IO2 and -C IO2->IO1, respectively. Determine the following: -C -C LPL = pointer to the last neighbor of IO1, -C LP = pointer to IO2 as a neighbor of IO1, and -C LPP = pointer to the node N2 preceding IO2. -C - LPL = LEND(IO1) - LPP = LPL - LP = LPTR(LPP) - 2 IF (LIST(LP) .EQ. IO2) GO TO 3 - LPP = LP - LP = LPTR(LPP) - IF (LP .NE. LPL) GO TO 2 -C -C IO2 should be the last neighbor of IO1. Test for no -C arc and bypass the swap test if IO1 is a boundary -C node. -C - IF (ABS(LIST(LP)) .NE. IO2) GO TO 8 - IF (LIST(LP) .LT. 0) GO TO 4 -C -C Store N1 and N2, or bypass the swap test if IO1 is a -C boundary node and IO2 is its first neighbor. -C - 3 N2 = LIST(LPP) - IF (N2 .LT. 0) GO TO 4 - LP = LPTR(LP) - N1 = ABS(LIST(LP)) -C -C Test IO1-IO2 for a swap, and update IWK if necessary. -C - IF ( .NOT. SWPTST(N1,N2,IO1,IO2,X,Y,Z) ) GO TO 4 - CALL SWAP (N1,N2,IO1,IO2, LIST,LPTR,LEND, LP21) - IF (LP21 .EQ. 0) GO TO 9 - SWP = .TRUE. - IWK(1,I) = N1 - IWK(2,I) = N2 - 4 CONTINUE - IF (SWP) GO TO 1 -C -C Successful termination. -C - 5 NIT = ITER - IER = 0 - RETURN -C -C MAXIT iterations performed without convergence. -C - 6 NIT = MAXIT - IER = 1 - RETURN -C -C Invalid input parameter. -C - 7 NIT = 0 - IER = 2 - RETURN -C -C IO2 is not a neighbor of IO1. -C - 8 NIT = ITER - IER = 3 - RETURN -C -C Zero pointer returned by SWAP. -C - 9 NIT = ITER - IER = 4 - RETURN - END - SUBROUTINE SCOORD (PX,PY,PZ, PLAT,PLON,PNRM) - REAL PX, PY, PZ, PLAT, PLON, PNRM -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 08/27/90 -C -C This subroutine converts a point P from Cartesian coor- -C dinates to spherical coordinates. -C -C -C On input: -C -C PX,PY,PZ = Cartesian coordinates of P. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C PLAT = Latitude of P in the range -PI/2 to PI/2, or -C 0 if PNRM = 0. PLAT should be scaled by -C 180/PI to obtain the value in degrees. -C -C PLON = Longitude of P in the range -PI to PI, or 0 -C if P lies on the Z-axis. PLON should be -C scaled by 180/PI to obtain the value in -C degrees. -C -C PNRM = Magnitude (Euclidean norm) of P. -C -C Modules required by SCOORD: None -C -C Intrinsic functions called by SCOORD: ASIN, ATAN2, SQRT -C -C*********************************************************** -C - PNRM = SQRT(PX*PX + PY*PY + PZ*PZ) - IF (PX .NE. 0. .OR. PY .NE. 0.) THEN - PLON = ATAN2(PY,PX) - ELSE - PLON = 0. - ENDIF - IF (PNRM .NE. 
0.) THEN - PLAT = ASIN(PZ/PNRM) - ELSE - PLAT = 0. - ENDIF - RETURN - END - REAL FUNCTION STORE (X) - REAL X -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 05/09/92 -C -C This function forces its argument X to be stored in a -C memory location, thus providing a means of determining -C floating point number characteristics (such as the machine -C precision) when it is necessary to avoid computation in -C high precision registers. -C -C -C On input: -C -C X = Value to be stored. -C -C X is not altered by this function. -C -C On output: -C -C STORE = Value of X after it has been stored and -C possibly truncated or rounded to the single -C precision word length. -C -C Modules required by STORE: None -C -C*********************************************************** -C - REAL Y - COMMON/STCOM/Y - Y = X - STORE = Y - RETURN - END - SUBROUTINE SWAP (IN1,IN2,IO1,IO2, LIST,LPTR, - . LEND, LP21) - INTEGER IN1, IN2, IO1, IO2, LIST(*), LPTR(*), LEND(*), - . LP21 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/22/98 -C -C Given a triangulation of a set of points on the unit -C sphere, this subroutine replaces a diagonal arc in a -C strictly convex quadrilateral (defined by a pair of adja- -C cent triangles) with the other diagonal. Equivalently, a -C pair of adjacent triangles is replaced by another pair -C having the same union. -C -C -C On input: -C -C IN1,IN2,IO1,IO2 = Nodal indexes of the vertices of -C the quadrilateral. IO1-IO2 is re- -C placed by IN1-IN2. (IO1,IO2,IN1) -C and (IO2,IO1,IN2) must be trian- -C gles on input. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C On output: -C -C LIST,LPTR,LEND = Data structure updated with the -C swap -- triangles (IO1,IO2,IN1) and -C (IO2,IO1,IN2) are replaced by -C (IN1,IN2,IO2) and (IN2,IN1,IO1) -C unless LP21 = 0. -C -C LP21 = Index of IN1 as a neighbor of IN2 after the -C swap is performed unless IN1 and IN2 are -C adjacent on input, in which case LP21 = 0. -C -C Module required by SWAP: LSTPTR -C -C Intrinsic function called by SWAP: ABS -C -C*********************************************************** -C - INTEGER LSTPTR - INTEGER LP, LPH, LPSAV -C -C Local parameters: -C -C LP,LPH,LPSAV = LIST pointers -C -C -C Test for IN1 and IN2 adjacent. -C - LP = LSTPTR(LEND(IN1),IN2,LIST,LPTR) - IF (ABS(LIST(LP)) .EQ. IN2) THEN - LP21 = 0 - RETURN - ENDIF -C -C Delete IO2 as a neighbor of IO1. -C - LP = LSTPTR(LEND(IO1),IN2,LIST,LPTR) - LPH = LPTR(LP) - LPTR(LP) = LPTR(LPH) -C -C If IO2 is the last neighbor of IO1, make IN2 the -C last neighbor. -C - IF (LEND(IO1) .EQ. LPH) LEND(IO1) = LP -C -C Insert IN2 as a neighbor of IN1 following IO1 -C using the hole created above. -C - LP = LSTPTR(LEND(IN1),IO1,LIST,LPTR) - LPSAV = LPTR(LP) - LPTR(LP) = LPH - LIST(LPH) = IN2 - LPTR(LPH) = LPSAV -C -C Delete IO1 as a neighbor of IO2. -C - LP = LSTPTR(LEND(IO2),IN1,LIST,LPTR) - LPH = LPTR(LP) - LPTR(LP) = LPTR(LPH) -C -C If IO1 is the last neighbor of IO2, make IN1 the -C last neighbor. -C - IF (LEND(IO2) .EQ. LPH) LEND(IO2) = LP -C -C Insert IN1 as a neighbor of IN2 following IO2. 
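SCOORD above maps a Cartesian point back to spherical coordinates with ATAN2 and ASIN, returning zero longitude on the Z-axis and zero latitude for a zero-norm input. The same conversion as a Python sketch (radians; scale by 180/pi for degrees, as the header notes):

    import math

    def scoord(px, py, pz):
        # Cartesian -> (lat, lon, norm), mirroring SCOORD's guards.
        pnrm = math.sqrt(px * px + py * py + pz * pz)
        plon = math.atan2(py, px) if (px != 0.0 or py != 0.0) else 0.0
        plat = math.asin(pz / pnrm) if pnrm != 0.0 else 0.0
        return plat, plon, pnrm

    print(scoord(0.0, 0.0, 1.0))  # (pi/2, 0.0, 1.0): the north pole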
-C - LP = LSTPTR(LEND(IN2),IO2,LIST,LPTR) - LPSAV = LPTR(LP) - LPTR(LP) = LPH - LIST(LPH) = IN1 - LPTR(LPH) = LPSAV - LP21 = LPH - RETURN - END - LOGICAL FUNCTION SWPTST (N1,N2,N3,N4,X,Y,Z) - INTEGER N1, N2, N3, N4 - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 03/29/91 -C -C This function decides whether or not to replace a -C diagonal arc in a quadrilateral with the other diagonal. -C The decision will be to swap (SWPTST = TRUE) if and only -C if N4 lies above the plane (in the half-space not contain- -C ing the origin) defined by (N1,N2,N3), or equivalently, if -C the projection of N4 onto this plane is interior to the -C circumcircle of (N1,N2,N3). The decision will be for no -C swap if the quadrilateral is not strictly convex. -C -C -C On input: -C -C N1,N2,N3,N4 = Indexes of the four nodes defining the -C quadrilateral with N1 adjacent to N2, -C and (N1,N2,N3) in counterclockwise -C order. The arc connecting N1 to N2 -C should be replaced by an arc connec- -C ting N3 to N4 if SWPTST = TRUE. Refer -C to Subroutine SWAP. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. (X(I),Y(I),Z(I)) -C define node I for I = N1, N2, N3, and N4. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C SWPTST = TRUE if and only if the arc connecting N1 -C and N2 should be swapped for an arc con- -C necting N3 and N4. -C -C Modules required by SWPTST: None -C -C*********************************************************** -C - REAL DX1, DX2, DX3, DY1, DY2, DY3, DZ1, DZ2, DZ3, - . X4, Y4, Z4 -C -C Local parameters: -C -C DX1,DY1,DZ1 = Coordinates of N4->N1 -C DX2,DY2,DZ2 = Coordinates of N4->N2 -C DX3,DY3,DZ3 = Coordinates of N4->N3 -C X4,Y4,Z4 = Coordinates of N4 -C - X4 = X(N4) - Y4 = Y(N4) - Z4 = Z(N4) - DX1 = X(N1) - X4 - DX2 = X(N2) - X4 - DX3 = X(N3) - X4 - DY1 = Y(N1) - Y4 - DY2 = Y(N2) - Y4 - DY3 = Y(N3) - Y4 - DZ1 = Z(N1) - Z4 - DZ2 = Z(N2) - Z4 - DZ3 = Z(N3) - Z4 -C -C N4 lies above the plane of (N1,N2,N3) iff N3 lies above -C the plane of (N2,N1,N4) iff Det(N3-N4,N2-N4,N1-N4) = -C (N3-N4,N2-N4 X N1-N4) > 0. -C - SWPTST = DX3*(DY2*DZ1 - DY1*DZ2) - . -DY3*(DX2*DZ1 - DX1*DZ2) - . +DZ3*(DX2*DY1 - DX1*DY2) .GT. 0. - RETURN - END - SUBROUTINE TRANS (N,RLAT,RLON, X,Y,Z) - INTEGER N - REAL RLAT(N), RLON(N), X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 04/08/90 -C -C This subroutine transforms spherical coordinates into -C Cartesian coordinates on the unit sphere for input to -C Subroutine TRMESH. Storage for X and Y may coincide with -C storage for RLAT and RLON if the latter need not be saved. -C -C -C On input: -C -C N = Number of nodes (points on the unit sphere) -C whose coordinates are to be transformed. -C -C RLAT = Array of length N containing latitudinal -C coordinates of the nodes in radians. -C -C RLON = Array of length N containing longitudinal -C coordinates of the nodes in radians. -C -C The above parameters are not altered by this routine. -C -C X,Y,Z = Arrays of length at least N. -C -C On output: -C -C X,Y,Z = Cartesian coordinates in the range -1 to 1. -C X(I)**2 + Y(I)**2 + Z(I)**2 = 1 for I = 1 -C to N. 
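SWPTST above reduces the spherical Delaunay (circumcircle) test to the sign of one determinant: swap the diagonal iff N4 lies above the plane of (N1,N2,N3), i.e. det(N3-N4, N2-N4, N1-N4) > 0. A compact Python equivalent of that predicate (helper name and NumPy usage are illustrative):

    import numpy as np

    def swap_test(n1, n2, n3, n4):
        # True iff n4 is above the plane of (n1, n2, n3), i.e. the projection
        # of n4 onto that plane is interior to the circumcircle of (n1, n2, n3),
        # exactly the quantity SWPTST evaluates.
        d1 = np.asarray(n1) - np.asarray(n4)
        d2 = np.asarray(n2) - np.asarray(n4)
        d3 = np.asarray(n3) - np.asarray(n4)
        return float(np.dot(d3, np.cross(d2, d1))) > 0.0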
-C -C Modules required by TRANS: None -C -C Intrinsic functions called by TRANS: COS, SIN -C -C*********************************************************** -C - INTEGER I, NN - REAL COSPHI, PHI, THETA -C -C Local parameters: -C -C COSPHI = cos(PHI) -C I = DO-loop index -C NN = Local copy of N -C PHI = Latitude -C THETA = Longitude -C - NN = N - DO 1 I = 1,NN - PHI = RLAT(I) - THETA = RLON(I) - COSPHI = COS(PHI) - X(I) = COSPHI*COS(THETA) - Y(I) = COSPHI*SIN(THETA) - Z(I) = SIN(PHI) - 1 CONTINUE - RETURN - END - SUBROUTINE TRFIND (NST,P,N,X,Y,Z,LIST,LPTR,LEND, B1, - . B2,B3,I1,I2,I3) - INTEGER NST, N, LIST(*), LPTR(*), LEND(N), I1, I2, I3 - REAL P(3), X(N), Y(N), Z(N), B1, B2, B3 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 11/30/99 -C -C This subroutine locates a point P relative to a triangu- -C lation created by Subroutine TRMESH. If P is contained in -C a triangle, the three vertex indexes and barycentric coor- -C dinates are returned. Otherwise, the indexes of the -C visible boundary nodes are returned. -C -C -C On input: -C -C NST = Index of a node at which TRFIND begins its -C search. Search time depends on the proximity -C of this node to P. -C -C P = Array of length 3 containing the x, y, and z -C coordinates (in that order) of the point P to be -C located. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the triangulation nodes (unit -C vectors). (X(I),Y(I),Z(I)) defines node I -C for I = 1 to N. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C B1,B2,B3 = Unnormalized barycentric coordinates of -C the central projection of P onto the un- -C derlying planar triangle if P is in the -C convex hull of the nodes. These parame- -C ters are not altered if I1 = 0. -C -C I1,I2,I3 = Counterclockwise-ordered vertex indexes -C of a triangle containing P if P is con- -C tained in a triangle. If P is not in the -C convex hull of the nodes, I1 and I2 are -C the rightmost and leftmost (boundary) -C nodes that are visible from P, and -C I3 = 0. (If all boundary nodes are vis- -C ible from P, then I1 and I2 coincide.) -C I1 = I2 = I3 = 0 if P and all of the -C nodes are coplanar (lie on a common great -C circle. -C -C Modules required by TRFIND: JRAND, LSTPTR, STORE -C -C Intrinsic function called by TRFIND: ABS -C -C*********************************************************** -C - INTEGER JRAND, LSTPTR - INTEGER IX, IY, IZ, LP, N0, N1, N1S, N2, N2S, N3, N4, - . NEXT, NF, NL - REAL STORE - REAL DET, EPS, PTN1, PTN2, Q(3), S12, TOL, XP, YP, - . 
ZP - REAL X0, X1, X2, Y0, Y1, Y2, Z0, Z1, Z2 -C - SAVE IX, IY, IZ - DATA IX/1/, IY/2/, IZ/3/ -C -C Local parameters: -C -C EPS = Machine precision -C IX,IY,IZ = Integer seeds for JRAND -C LP = LIST pointer -C N0,N1,N2 = Nodes in counterclockwise order defining a -C cone (with vertex N0) containing P, or end- -C points of a boundary edge such that P Right -C N1->N2 -C N1S,N2S = Initially-determined values of N1 and N2 -C N3,N4 = Nodes opposite N1->N2 and N2->N1, respectively -C NEXT = Candidate for I1 or I2 when P is exterior -C NF,NL = First and last neighbors of N0, or first -C (rightmost) and last (leftmost) nodes -C visible from P when P is exterior to the -C triangulation -C PTN1 = Scalar product -C PTN2 = Scalar product -C Q = (N2 X N1) X N2 or N1 X (N2 X N1) -- used in -C the boundary traversal when P is exterior -C S12 = Scalar product -C TOL = Tolerance (multiple of EPS) defining an upper -C bound on the magnitude of a negative bary- -C centric coordinate (B1 or B2) for P in a -C triangle -- used to avoid an infinite number -C of restarts with 0 <= B3 < EPS and B1 < 0 or -C B2 < 0 but small in magnitude -C XP,YP,ZP = Local variables containing P(1), P(2), and P(3) -C X0,Y0,Z0 = Dummy arguments for DET -C X1,Y1,Z1 = Dummy arguments for DET -C X2,Y2,Z2 = Dummy arguments for DET -C -C Statement function: -C -C DET(X1,...,Z0) .GE. 0 if and only if (X0,Y0,Z0) is in the -C (closed) left hemisphere defined by -C the plane containing (0,0,0), -C (X1,Y1,Z1), and (X2,Y2,Z2), where -C left is defined relative to an ob- -C server at (X1,Y1,Z1) facing -C (X2,Y2,Z2). -C - DET (X1,Y1,Z1,X2,Y2,Z2,X0,Y0,Z0) = X0*(Y1*Z2-Y2*Z1) - . - Y0*(X1*Z2-X2*Z1) + Z0*(X1*Y2-X2*Y1) -C -C Initialize variables. -C - XP = P(1) - YP = P(2) - ZP = P(3) - N0 = NST - IF (N0 .LT. 1 .OR. N0 .GT. N) - . N0 = JRAND(N, IX,IY,IZ ) -C -C Compute the relative machine precision EPS and TOL. -C - EPS = 1.E0 - 1 EPS = EPS/2.E0 - IF (STORE(EPS+1.E0) .GT. 1.E0) GO TO 1 - EPS = 2.E0*EPS - TOL = 100.E0*EPS -C -C Set NF and NL to the first and last neighbors of N0, and -C initialize N1 = NF. -C - 2 LP = LEND(N0) - NL = LIST(LP) - LP = LPTR(LP) - NF = LIST(LP) - N1 = NF -C -C Find a pair of adjacent neighbors N1,N2 of N0 that define -C a wedge containing P: P LEFT N0->N1 and P RIGHT N0->N2. -C - IF (NL .GT. 0) THEN -C -C N0 is an interior node. Find N1. -C - 3 IF ( DET(X(N0),Y(N0),Z(N0),X(N1),Y(N1),Z(N1), - . XP,YP,ZP) .LT. 0. ) THEN - LP = LPTR(LP) - N1 = LIST(LP) - IF (N1 .EQ. NL) GO TO 6 - GO TO 3 - ENDIF - ELSE -C -C N0 is a boundary node. Test for P exterior. -C - NL = -NL - IF ( DET(X(N0),Y(N0),Z(N0),X(NF),Y(NF),Z(NF), - . XP,YP,ZP) .LT. 0. ) THEN -C -C P is to the right of the boundary edge N0->NF. -C - N1 = N0 - N2 = NF - GO TO 9 - ENDIF - IF ( DET(X(NL),Y(NL),Z(NL),X(N0),Y(N0),Z(N0), - . XP,YP,ZP) .LT. 0. ) THEN -C -C P is to the right of the boundary edge NL->N0. -C - N1 = NL - N2 = N0 - GO TO 9 - ENDIF - ENDIF -C -C P is to the left of arcs N0->N1 and NL->N0. Set N2 to the -C next neighbor of N0 (following N1). -C - 4 LP = LPTR(LP) - N2 = ABS(LIST(LP)) - IF ( DET(X(N0),Y(N0),Z(N0),X(N2),Y(N2),Z(N2), - . XP,YP,ZP) .LT. 0. ) GO TO 7 - N1 = N2 - IF (N1 .NE. NL) GO TO 4 - IF ( DET(X(N0),Y(N0),Z(N0),X(NF),Y(NF),Z(NF), - . XP,YP,ZP) .LT. 0. ) GO TO 6 -C -C P is left of or on arcs N0->NB for all neighbors NB -C of N0. Test for P = +/-N0. -C - IF (STORE(ABS(X(N0)*XP + Y(N0)*YP + Z(N0)*ZP)) - . .LT. 1.0-4.0*EPS) THEN -C -C All points are collinear iff P Left NB->N0 for all -C neighbors NB of N0. Search the neighbors of N0. 
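The short loop above computes the relative machine precision at run time: EPS is halved until STORE(EPS + 1) can no longer be told apart from 1 (STORE exists only to flush the sum out of extended-precision registers), then EPS is doubled back and TOL = 100*EPS bounds how negative a barycentric coordinate may be before TRFIND restarts. The same idea in Python, where a plain float comparison suffices:

    def machine_eps():
        # Halve eps until 1 + eps/2 rounds to 1; the result is the smallest
        # power of two for which 1 + eps is still greater than 1.
        eps = 1.0
        while 1.0 + eps / 2.0 > 1.0:
            eps /= 2.0
        return eps

    eps = machine_eps()     # about 2.22e-16 for IEEE double precision
    tol = 100.0 * eps       # TRFIND's tolerance on slightly negative B1, B2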
-C Note: N1 = NL and LP points to NL. -C - 5 IF ( DET(X(N1),Y(N1),Z(N1),X(N0),Y(N0),Z(N0), - . XP,YP,ZP) .GE. 0. ) THEN - LP = LPTR(LP) - N1 = ABS(LIST(LP)) - IF (N1 .EQ. NL) GO TO 14 - GO TO 5 - ENDIF - ENDIF -C -C P is to the right of N1->N0, or P = +/-N0. Set N0 to N1 -C and start over. -C - N0 = N1 - GO TO 2 -C -C P is between arcs N0->N1 and N0->NF. -C - 6 N2 = NF -C -C P is contained in a wedge defined by geodesics N0-N1 and -C N0-N2, where N1 is adjacent to N2. Save N1 and N2 to -C test for cycling. -C - 7 N3 = N0 - N1S = N1 - N2S = N2 -C -C Top of edge-hopping loop: -C - 8 B3 = DET(X(N1),Y(N1),Z(N1),X(N2),Y(N2),Z(N2),XP,YP,ZP) - IF (B3 .LT. 0.) THEN -C -C Set N4 to the first neighbor of N2 following N1 (the -C node opposite N2->N1) unless N1->N2 is a boundary arc. -C - LP = LSTPTR(LEND(N2),N1,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 9 - LP = LPTR(LP) - N4 = ABS(LIST(LP)) -C -C Define a new arc N1->N2 which intersects the geodesic -C N0-P. -C - IF ( DET(X(N0),Y(N0),Z(N0),X(N4),Y(N4),Z(N4), - . XP,YP,ZP) .LT. 0. ) THEN - N3 = N2 - N2 = N4 - N1S = N1 - IF (N2 .NE. N2S .AND. N2 .NE. N0) GO TO 8 - ELSE - N3 = N1 - N1 = N4 - N2S = N2 - IF (N1 .NE. N1S .AND. N1 .NE. N0) GO TO 8 - ENDIF -C -C The starting node N0 or edge N1-N2 was encountered -C again, implying a cycle (infinite loop). Restart -C with N0 randomly selected. -C - N0 = JRAND(N, IX,IY,IZ ) - GO TO 2 - ENDIF -C -C P is in (N1,N2,N3) unless N0, N1, N2, and P are collinear -C or P is close to -N0. -C - IF (B3 .GE. EPS) THEN -C -C B3 .NE. 0. -C - B1 = DET(X(N2),Y(N2),Z(N2),X(N3),Y(N3),Z(N3), - . XP,YP,ZP) - B2 = DET(X(N3),Y(N3),Z(N3),X(N1),Y(N1),Z(N1), - . XP,YP,ZP) - IF (B1 .LT. -TOL .OR. B2 .LT. -TOL) THEN -C -C Restart with N0 randomly selected. -C - N0 = JRAND(N, IX,IY,IZ ) - GO TO 2 - ENDIF - ELSE -C -C B3 = 0 and thus P lies on N1->N2. Compute -C B1 = Det(P,N2 X N1,N2) and B2 = Det(P,N1,N2 X N1). -C - B3 = 0. - S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2) - PTN1 = XP*X(N1) + YP*Y(N1) + ZP*Z(N1) - PTN2 = XP*X(N2) + YP*Y(N2) + ZP*Z(N2) - B1 = PTN1 - S12*PTN2 - B2 = PTN2 - S12*PTN1 - IF (B1 .LT. -TOL .OR. B2 .LT. -TOL) THEN -C -C Restart with N0 randomly selected. -C - N0 = JRAND(N, IX,IY,IZ ) - GO TO 2 - ENDIF - ENDIF -C -C P is in (N1,N2,N3). -C - I1 = N1 - I2 = N2 - I3 = N3 - IF (B1 .LT. 0.0) B1 = 0.0 - IF (B2 .LT. 0.0) B2 = 0.0 - RETURN -C -C P Right N1->N2, where N1->N2 is a boundary edge. -C Save N1 and N2, and set NL = 0 to indicate that -C NL has not yet been found. -C - 9 N1S = N1 - N2S = N2 - NL = 0 -C -C Counterclockwise Boundary Traversal: -C - 10 LP = LEND(N2) - LP = LPTR(LP) - NEXT = LIST(LP) - IF ( DET(X(N2),Y(N2),Z(N2),X(NEXT),Y(NEXT),Z(NEXT), - . XP,YP,ZP) .GE. 0. ) THEN -C -C N2 is the rightmost visible node if P Forward N2->N1 -C or NEXT Forward N2->N1. Set Q to (N2 X N1) X N2. -C - S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2) - Q(1) = X(N1) - S12*X(N2) - Q(2) = Y(N1) - S12*Y(N2) - Q(3) = Z(N1) - S12*Z(N2) - IF (XP*Q(1) + YP*Q(2) + ZP*Q(3) .GE. 0.) GO TO 11 - IF (X(NEXT)*Q(1) + Y(NEXT)*Q(2) + Z(NEXT)*Q(3) - . .GE. 0.) GO TO 11 -C -C N1, N2, NEXT, and P are nearly collinear, and N2 is -C the leftmost visible node. -C - NL = N2 - ENDIF -C -C Bottom of counterclockwise loop: -C - N1 = N2 - N2 = NEXT - IF (N2 .NE. N1S) GO TO 10 -C -C All boundary nodes are visible from P. -C - I1 = N1S - I2 = N1S - I3 = 0 - RETURN -C -C N2 is the rightmost visible node. -C - 11 NF = N2 - IF (NL .EQ. 0) THEN -C -C Restore initial values of N1 and N2, and begin the search -C for the leftmost visible node. 
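In the edge-hopping loop above, containment of P in triangle (N1,N2,N3) is decided from three unnormalized barycentric coordinates, each a scalar triple product of P with an ordered pair of vertices; P is accepted when none of them falls below -TOL. A short Python sketch of those coordinates (illustrative helper, NumPy assumed):

    import numpy as np

    def bary_coords(p, n1, n2, n3):
        # Unnormalized spherical barycentric coordinates as in TRFIND:
        # b1 = <p, n2 x n3>, b2 = <p, n3 x n1>, b3 = <p, n1 x n2>.
        # For a counterclockwise triangle, all three are >= 0 when p is inside.
        b1 = float(np.dot(p, np.cross(n2, n3)))
        b2 = float(np.dot(p, np.cross(n3, n1)))
        b3 = float(np.dot(p, np.cross(n1, n2)))
        return b1, b2, b3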
-C - N2 = N2S - N1 = N1S -C -C Clockwise Boundary Traversal: -C - 12 LP = LEND(N1) - NEXT = -LIST(LP) - IF ( DET(X(NEXT),Y(NEXT),Z(NEXT),X(N1),Y(N1),Z(N1), - . XP,YP,ZP) .GE. 0. ) THEN -C -C N1 is the leftmost visible node if P or NEXT is -C forward of N1->N2. Compute Q = N1 X (N2 X N1). -C - S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2) - Q(1) = X(N2) - S12*X(N1) - Q(2) = Y(N2) - S12*Y(N1) - Q(3) = Z(N2) - S12*Z(N1) - IF (XP*Q(1) + YP*Q(2) + ZP*Q(3) .GE. 0.) GO TO 13 - IF (X(NEXT)*Q(1) + Y(NEXT)*Q(2) + Z(NEXT)*Q(3) - . .GE. 0.) GO TO 13 -C -C P, NEXT, N1, and N2 are nearly collinear and N1 is the -C rightmost visible node. -C - NF = N1 - ENDIF -C -C Bottom of clockwise loop: -C - N2 = N1 - N1 = NEXT - IF (N1 .NE. N1S) GO TO 12 -C -C All boundary nodes are visible from P. -C - I1 = N1 - I2 = N1 - I3 = 0 - RETURN -C -C N1 is the leftmost visible node. -C - 13 NL = N1 - ENDIF -C -C NF and NL have been found. -C - I1 = NF - I2 = NL - I3 = 0 - RETURN -C -C All points are collinear (coplanar). -C - 14 I1 = 0 - I2 = 0 - I3 = 0 - RETURN - END - SUBROUTINE TRLIST (N,LIST,LPTR,LEND,NROW, NT,LTRI,IER) - INTEGER N, LIST(*), LPTR(*), LEND(N), NROW, NT, - . LTRI(NROW,*), IER -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/20/96 -C -C This subroutine converts a triangulation data structure -C from the linked list created by Subroutine TRMESH to a -C triangle list. -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C LIST,LPTR,LEND = Linked list data structure defin- -C ing the triangulation. Refer to -C Subroutine TRMESH. -C -C NROW = Number of rows (entries per triangle) re- -C served for the triangle list LTRI. The value -C must be 6 if only the vertex indexes and -C neighboring triangle indexes are to be -C stored, or 9 if arc indexes are also to be -C assigned and stored. Refer to LTRI. -C -C The above parameters are not altered by this routine. -C -C LTRI = Integer array of length at least NROW*NT, -C where NT is at most 2N-4. (A sufficient -C length is 12N if NROW=6 or 18N if NROW=9.) -C -C On output: -C -C NT = Number of triangles in the triangulation unless -C IER .NE. 0, in which case NT = 0. NT = 2N-NB-2 -C if NB .GE. 3 or 2N-4 if NB = 0, where NB is the -C number of boundary nodes. -C -C LTRI = NROW by NT array whose J-th column contains -C the vertex nodal indexes (first three rows), -C neighboring triangle indexes (second three -C rows), and, if NROW = 9, arc indexes (last -C three rows) associated with triangle J for -C J = 1,...,NT. The vertices are ordered -C counterclockwise with the first vertex taken -C to be the one with smallest index. Thus, -C LTRI(2,J) and LTRI(3,J) are larger than -C LTRI(1,J) and index adjacent neighbors of -C node LTRI(1,J). For I = 1,2,3, LTRI(I+3,J) -C and LTRI(I+6,J) index the triangle and arc, -C respectively, which are opposite (not shared -C by) node LTRI(I,J), with LTRI(I+3,J) = 0 if -C LTRI(I+6,J) indexes a boundary arc. Vertex -C indexes range from 1 to N, triangle indexes -C from 0 to NT, and, if included, arc indexes -C from 1 to NA, where NA = 3N-NB-3 if NB .GE. 3 -C or 3N-6 if NB = 0. The triangles are or- -C dered on first (smallest) vertex indexes. -C -C IER = Error indicator. -C IER = 0 if no errors were encountered. -C IER = 1 if N or NROW is outside its valid -C range on input. -C IER = 2 if the triangulation data structure -C (LIST,LPTR,LEND) is invalid. 
Note, -C however, that these arrays are not -C completely tested for validity. -C -C Modules required by TRLIST: None -C -C Intrinsic function called by TRLIST: ABS -C -C*********************************************************** -C - INTEGER I, I1, I2, I3, ISV, J, KA, KN, KT, LP, LP2, - . LPL, LPLN1, N1, N2, N3, NM2 - LOGICAL ARCS -C -C Local parameters: -C -C ARCS = Logical variable with value TRUE iff are -C indexes are to be stored -C I,J = LTRI row indexes (1 to 3) associated with -C triangles KT and KN, respectively -C I1,I2,I3 = Nodal indexes of triangle KN -C ISV = Variable used to permute indexes I1,I2,I3 -C KA = Arc index and number of currently stored arcs -C KN = Index of the triangle that shares arc I1-I2 -C with KT -C KT = Triangle index and number of currently stored -C triangles -C LP = LIST pointer -C LP2 = Pointer to N2 as a neighbor of N1 -C LPL = Pointer to the last neighbor of I1 -C LPLN1 = Pointer to the last neighbor of N1 -C N1,N2,N3 = Nodal indexes of triangle KT -C NM2 = N-2 -C -C -C Test for invalid input parameters. -C - IF (N .LT. 3 .OR. (NROW .NE. 6 .AND. NROW .NE. 9)) - . GO TO 11 -C -C Initialize parameters for loop on triangles KT = (N1,N2, -C N3), where N1 < N2 and N1 < N3. -C -C ARCS = TRUE iff arc indexes are to be stored. -C KA,KT = Numbers of currently stored arcs and triangles. -C NM2 = Upper bound on candidates for N1. -C - ARCS = NROW .EQ. 9 - KA = 0 - KT = 0 - NM2 = N-2 -C -C Loop on nodes N1. -C - DO 9 N1 = 1,NM2 -C -C Loop on pairs of adjacent neighbors (N2,N3). LPLN1 points -C to the last neighbor of N1, and LP2 points to N2. -C - LPLN1 = LEND(N1) - LP2 = LPLN1 - 1 LP2 = LPTR(LP2) - N2 = LIST(LP2) - LP = LPTR(LP2) - N3 = ABS(LIST(LP)) - IF (N2 .LT. N1 .OR. N3 .LT. N1) GO TO 8 -C -C Add a new triangle KT = (N1,N2,N3). -C - KT = KT + 1 - LTRI(1,KT) = N1 - LTRI(2,KT) = N2 - LTRI(3,KT) = N3 -C -C Loop on triangle sides (I2,I1) with neighboring triangles -C KN = (I1,I2,I3). -C - DO 7 I = 1,3 - IF (I .EQ. 1) THEN - I1 = N3 - I2 = N2 - ELSEIF (I .EQ. 2) THEN - I1 = N1 - I2 = N3 - ELSE - I1 = N2 - I2 = N1 - ENDIF -C -C Set I3 to the neighbor of I1 that follows I2 unless -C I2->I1 is a boundary arc. -C - LPL = LEND(I1) - LP = LPTR(LPL) - 2 IF (LIST(LP) .EQ. I2) GO TO 3 - LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 2 -C -C I2 is the last neighbor of I1 unless the data structure -C is invalid. Bypass the search for a neighboring -C triangle if I2->I1 is a boundary arc. -C - IF (ABS(LIST(LP)) .NE. I2) GO TO 12 - KN = 0 - IF (LIST(LP) .LT. 0) GO TO 6 -C -C I2->I1 is not a boundary arc, and LP points to I2 as -C a neighbor of I1. -C - 3 LP = LPTR(LP) - I3 = ABS(LIST(LP)) -C -C Find J such that LTRI(J,KN) = I3 (not used if KN > KT), -C and permute the vertex indexes of KN so that I1 is -C smallest. -C - IF (I1 .LT. I2 .AND. I1 .LT. I3) THEN - J = 3 - ELSEIF (I2 .LT. I3) THEN - J = 2 - ISV = I1 - I1 = I2 - I2 = I3 - I3 = ISV - ELSE - J = 1 - ISV = I1 - I1 = I3 - I3 = I2 - I2 = ISV - ENDIF -C -C Test for KN > KT (triangle index not yet assigned). -C - IF (I1 .GT. N1) GO TO 7 -C -C Find KN, if it exists, by searching the triangle list in -C reverse order. -C - DO 4 KN = KT-1,1,-1 - IF (LTRI(1,KN) .EQ. I1 .AND. LTRI(2,KN) .EQ. - . I2 .AND. LTRI(3,KN) .EQ. I3) GO TO 5 - 4 CONTINUE - GO TO 7 -C -C Store KT as a neighbor of KN. -C - 5 LTRI(J+3,KN) = KT -C -C Store KN as a neighbor of KT, and add a new arc KA. -C - 6 LTRI(I+3,KT) = KN - IF (ARCS) THEN - KA = KA + 1 - LTRI(I+6,KT) = KA - IF (KN .NE. 
0) LTRI(J+6,KN) = KA - ENDIF - 7 CONTINUE -C -C Bottom of loop on triangles. -C - 8 IF (LP2 .NE. LPLN1) GO TO 1 - 9 CONTINUE -C -C No errors encountered. -C - NT = KT - IER = 0 - RETURN -C -C Invalid input parameter. -C - 11 NT = 0 - IER = 1 - RETURN -C -C Invalid triangulation data structure: I1 is a neighbor of -C I2, but I2 is not a neighbor of I1. -C - 12 NT = 0 - IER = 2 - RETURN - END - SUBROUTINE TRLPRT (N,X,Y,Z,IFLAG,NROW,NT,LTRI,LOUT) - INTEGER N, IFLAG, NROW, NT, LTRI(NROW,NT), LOUT - REAL X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/02/98 -C -C This subroutine prints the triangle list created by Sub- -C routine TRLIST and, optionally, the nodal coordinates -C (either latitude and longitude or Cartesian coordinates) -C on logical unit LOUT. The numbers of boundary nodes, -C triangles, and arcs are also printed. -C -C -C On input: -C -C N = Number of nodes in the triangulation. -C 3 .LE. N .LE. 9999. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes if IFLAG = 0, or -C (X and Y only) arrays of length N containing -C longitude and latitude, respectively, if -C IFLAG > 0, or unused dummy parameters if -C IFLAG < 0. -C -C IFLAG = Nodal coordinate option indicator: -C IFLAG = 0 if X, Y, and Z (assumed to contain -C Cartesian coordinates) are to be -C printed (to 6 decimal places). -C IFLAG > 0 if only X and Y (assumed to con- -C tain longitude and latitude) are -C to be printed (to 6 decimal -C places). -C IFLAG < 0 if only the adjacency lists are to -C be printed. -C -C NROW = Number of rows (entries per triangle) re- -C served for the triangle list LTRI. The value -C must be 6 if only the vertex indexes and -C neighboring triangle indexes are stored, or 9 -C if arc indexes are also stored. -C -C NT = Number of triangles in the triangulation. -C 1 .LE. NT .LE. 9999. -C -C LTRI = NROW by NT array whose J-th column contains -C the vertex nodal indexes (first three rows), -C neighboring triangle indexes (second three -C rows), and, if NROW = 9, arc indexes (last -C three rows) associated with triangle J for -C J = 1,...,NT. -C -C LOUT = Logical unit number for output. If LOUT is -C not in the range 0 to 99, output is written -C to unit 6. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C The triangle list and nodal coordinates (as specified by -C IFLAG) are written to unit LOUT. -C -C Modules required by TRLPRT: None -C -C*********************************************************** -C - INTEGER I, K, LUN, NA, NB, NL, NLMAX, NMAX - DATA NMAX/9999/, NLMAX/58/ -C -C Local parameters: -C -C I = DO-loop, nodal index, and row index for LTRI -C K = DO-loop and triangle index -C LUN = Logical unit number for output -C NA = Number of triangulation arcs -C NB = Number of boundary nodes -C NL = Number of lines printed on the current page -C NLMAX = Maximum number of print lines per page (except -C for the last page which may have two addi- -C tional lines) -C NMAX = Maximum value of N and NT (4-digit format) -C - LUN = LOUT - IF (LUN .LT. 0 .OR. LUN .GT. 99) LUN = 6 -C -C Print a heading and test for invalid input. -C -C open(18,file='nodes.dat',STATUS= 'UNKNOWN') -C do ig = 1,N -C read(18,*) x(ig),y(ig),z(ig) -C enddo -C close(18) - - WRITE (LUN,100) N - NL = 3 - IF (N .LT. 3 .OR. N .GT. NMAX .OR. - . (NROW .NE. 6 .AND. NROW .NE. 9) .OR. - . NT .LT. 1 .OR. NT .GT. 
NMAX) THEN -C -C Print an error message and exit. -C - WRITE (LUN,110) N, NROW, NT - RETURN - ENDIF - IF (IFLAG .EQ. 0) THEN -C -C Print X, Y, and Z. -C - WRITE (LUN,101) - NL = 6 - DO 1 I = 1,N - IF (NL .GE. NLMAX) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - WRITE (LUN,103) I, X(I), Y(I), Z(I) - NL = NL + 1 - 1 CONTINUE - ELSEIF (IFLAG .GT. 0) THEN -C -C Print X (longitude) and Y (latitude). -C - WRITE (LUN,102) - NL = 6 - DO 2 I = 1,N - IF (NL .GE. NLMAX) THEN - WRITE (LUN,108) - NL = 0 - ENDIF -! WRITE (LUN,104) I, X(I), Y(I) - WRITE (LUN,103) I, X(I), Y(I), Z(I) - NL = NL + 1 - 2 CONTINUE - ENDIF -C -C Print the triangulation LTRI. -C - IF (NL .GT. NLMAX/2) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - IF (NROW .EQ. 6) THEN - WRITE (LUN,105) - ELSE - WRITE (LUN,106) - ENDIF - NL = NL + 5 - DO 3 K = 1,NT - IF (NL .GE. NLMAX) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - WRITE (LUN,107) K, (LTRI(I,K), I = 1,NROW) - NL = NL + 1 - 3 CONTINUE -C -C Print NB, NA, and NT (boundary nodes, arcs, and -C triangles). -C - NB = 2*N - NT - 2 - IF (NB .LT. 3) THEN - NB = 0 - NA = 3*N - 6 - ELSE - NA = NT + N - 1 - ENDIF - WRITE (LUN,109) NB, NA, NT - RETURN -C -C Print formats: -C - 100 FORMAT (///18X,'STRIPACK (TRLIST) Output, N = ',I4) - 101 FORMAT (//8X,'Node',10X,'X(Node)',10X,'Y(Node)',10X, - . 'Z(Node)'//) -! 102 FORMAT (//16X,'Node',8X,'Longitude',9X,'Latitude'//) - 102 FORMAT (//8X,'Node',8X,'XXXXXXXXX',9X,'YYYYYYYY',9X,'ZZZZZZZZ'//) - - 103 FORMAT (8X,I4,3E17.6) - 104 FORMAT (16X,I4,2E17.6) - 105 FORMAT (//1X,'Triangle',8X,'Vertices',12X,'Neighbors'/ - . 4X,'KT',7X,'N1',5X,'N2',5X,'N3',4X,'KT1',4X, - . 'KT2',4X,'KT3'/) - 106 FORMAT (//1X,'Triangle',8X,'Vertices',12X,'Neighbors', - . 14X,'Arcs'/ - . 4X,'KT',7X,'N1',5X,'N2',5X,'N3',4X,'KT1',4X, - . 'KT2',4X,'KT3',4X,'KA1',4X,'KA2',4X,'KA3'/) - 107 FORMAT (2X,I4,2X,6(3X,I4),3(2X,I5)) - 108 FORMAT (///) - 109 FORMAT (/1X,'NB = ',I4,' Boundary Nodes',5X, - . 'NA = ',I5,' Arcs',5X,'NT = ',I5, - . ' Triangles') - 110 FORMAT (//1X,10X,'*** Invalid Parameter: N =',I5, - . ', NROW =',I5,', NT =',I5,' ***') - END - SUBROUTINE TRMESH (N,X,Y,Z, LIST,LPTR,LEND,LNEW,NEAR, - . NEXT,DIST,IER) - INTEGER N, LIST(*), LPTR(*), LEND(N), LNEW, NEAR(N), - . NEXT(N), IER - REAL X(N), Y(N), Z(N), DIST(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/08/99 -C -C This subroutine creates a Delaunay triangulation of a -C set of N arbitrarily distributed points, referred to as -C nodes, on the surface of the unit sphere. The Delaunay -C triangulation is defined as a set of (spherical) triangles -C with the following five properties: -C -C 1) The triangle vertices are nodes. -C 2) No triangle contains a node other than its vertices. -C 3) The interiors of the triangles are pairwise disjoint. -C 4) The union of triangles is the convex hull of the set -C of nodes (the smallest convex set that contains -C the nodes). If the nodes are not contained in a -C single hemisphere, their convex hull is the en- -C tire sphere and there are no boundary nodes. -C Otherwise, there are at least three boundary nodes. -C 5) The interior of the circumcircle of each triangle -C contains no node. -C -C The first four properties define a triangulation, and the -C last property results in a triangulation which is as close -C as possible to equiangular in a certain sense and which is -C uniquely defined unless four or more nodes lie in a common -C plane. 
This property makes the triangulation well-suited -C for solving closest-point problems and for triangle-based -C interpolation. -C -C Provided the nodes are randomly ordered, the algorithm -C has expected time complexity O(N*log(N)) for most nodal -C distributions. Note, however, that the complexity may be -C as high as O(N**2) if, for example, the nodes are ordered -C on increasing latitude. -C -C Spherical coordinates (latitude and longitude) may be -C converted to Cartesian coordinates by Subroutine TRANS. -C -C The following is a list of the software package modules -C which a user may wish to call directly: -C -C ADDNOD - Updates the triangulation by appending a new -C node. -C -C AREAS - Returns the area of a spherical triangle. -C -C BNODES - Returns an array containing the indexes of the -C boundary nodes (if any) in counterclockwise -C order. Counts of boundary nodes, triangles, -C and arcs are also returned. -C -C CIRCUM - Returns the circumcenter of a spherical trian- -C gle. -C -C CRLIST - Returns the set of triangle circumcenters -C (Voronoi vertices) and circumradii associated -C with a triangulation. -C -C DELARC - Deletes a boundary arc from a triangulation. -C -C DELNOD - Updates the triangulation with a nodal deletion. -C -C EDGE - Forces an arbitrary pair of nodes to be connec- -C ted by an arc in the triangulation. -C -C GETNP - Determines the ordered sequence of L closest -C nodes to a given node, along with the associ- -C ated distances. -C -C INSIDE - Locates a point relative to a polygon on the -C surface of the sphere. -C -C INTRSC - Returns the point of intersection between a -C pair of great circle arcs. -C -C JRAND - Generates a uniformly distributed pseudo-random -C integer. -C -C LEFT - Locates a point relative to a great circle. -C -C NEARND - Returns the index of the nearest node to an -C arbitrary point, along with its squared -C distance. -C -C SCOORD - Converts a point from Cartesian coordinates to -C spherical coordinates. -C -C STORE - Forces a value to be stored in main memory so -C that the precision of floating point numbers -C in memory locations rather than registers is -C computed. -C -C TRANS - Transforms spherical coordinates into Cartesian -C coordinates on the unit sphere for input to -C Subroutine TRMESH. -C -C TRLIST - Converts the triangulation data structure to a -C triangle list more suitable for use in a fin- -C ite element code. -C -C TRLPRT - Prints the triangle list created by Subroutine -C TRLIST. -C -C TRMESH - Creates a Delaunay triangulation of a set of -C nodes. -C -C TRPLOT - Creates a level-2 Encapsulated Postscript (EPS) -C file containing a triangulation plot. -C -C TRPRNT - Prints the triangulation data structure and, -C optionally, the nodal coordinates. -C -C VRPLOT - Creates a level-2 Encapsulated Postscript (EPS) -C file containing a Voronoi diagram plot. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of distinct nodes. (X(K),Y(K), -C Z(K)) is referred to as node K, and K is re- -C ferred to as a nodal index. It is required -C that X(K)**2 + Y(K)**2 + Z(K)**2 = 1 for all -C K. The first three nodes must not be col- -C linear (lie on a common great circle). -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR = Arrays of length at least 6N-12. -C -C LEND = Array of length at least N. -C -C NEAR,NEXT,DIST = Work space arrays of length at -C least N. 
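A Python sketch, not part of the STRIPACK source: TRMESH expects unit-vector Cartesian input (the Fortran TRANS routine performs the latitude/longitude conversion), and its nearest-node bookkeeping measures closeness by -cos(arc length), i.e. a negated dot product. The helper names are ours, and the exact argument order and units of the Fortran TRANS are not reproduced here.

    import numpy as np

    def latlon_to_unit_xyz(lat_deg, lon_deg):
        """Latitude/longitude in degrees -> unit-sphere Cartesian
        coordinates, the form TRMESH requires (x**2 + y**2 + z**2 = 1)."""
        lat, lon = np.radians(lat_deg), np.radians(lon_deg)
        return np.cos(lat) * np.cos(lon), np.cos(lat) * np.sin(lon), np.sin(lat)

    def neg_cos_arc_distance(p, q):
        """-cos(arc length) between two unit vectors: the monotone
        pseudo-distance used by TRMESH's NEAR/DIST bookkeeping
        (larger value means farther apart)."""
        return -float(np.dot(p, q))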
The space is used to -C efficiently determine the nearest -C triangulation node to each un- -C processed node for use by ADDNOD. -C -C On output: -C -C LIST = Set of nodal indexes which, along with LPTR, -C LEND, and LNEW, define the triangulation as a -C set of N adjacency lists -- counterclockwise- -C ordered sequences of neighboring nodes such -C that the first and last neighbors of a bound- -C ary node are boundary nodes (the first neigh- -C bor of an interior node is arbitrary). In -C order to distinguish between interior and -C boundary nodes, the last neighbor of each -C boundary node is represented by the negative -C of its index. -C -C LPTR = Set of pointers (LIST indexes) in one-to-one -C correspondence with the elements of LIST. -C LIST(LPTR(I)) indexes the node which follows -C LIST(I) in cyclical counterclockwise order -C (the first neighbor follows the last neigh- -C bor). -C -C LEND = Set of pointers to adjacency lists. LEND(K) -C points to the last neighbor of node K for -C K = 1,...,N. Thus, LIST(LEND(K)) < 0 if and -C only if K is a boundary node. -C -C LNEW = Pointer to the first empty location in LIST -C and LPTR (list length plus one). LIST, LPTR, -C LEND, and LNEW are not altered if IER < 0, -C and are incomplete if IER > 0. -C -C NEAR,NEXT,DIST = Garbage. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = -1 if N < 3 on input. -C IER = -2 if the first three nodes are -C collinear. -C IER = L if nodes L and M coincide for some -C M > L. The data structure represents -C a triangulation of nodes 1 to M-1 in -C this case. -C -C Modules required by TRMESH: ADDNOD, BDYADD, COVSPH, -C INSERT, INTADD, JRAND, -C LEFT, LSTPTR, STORE, SWAP, -C SWPTST, TRFIND -C -C Intrinsic function called by TRMESH: ABS -C -C*********************************************************** -C - INTEGER I, I0, J, K, LP, LPL, NEXTI, NN - LOGICAL LEFT - REAL D, D1, D2, D3 -C -C Local parameters: -C -C D = (Negative cosine of) distance from node K to -C node I -C D1,D2,D3 = Distances from node K to nodes 1, 2, and 3, -C respectively -C I,J = Nodal indexes -C I0 = Index of the node preceding I in a sequence of -C unprocessed nodes: I = NEXT(I0) -C K = Index of node to be added and DO-loop index: -C K > 3 -C LP = LIST index (pointer) of a neighbor of K -C LPL = Pointer to the last neighbor of K -C NEXTI = NEXT(I) -C NN = Local copy of N -C - NN = N - IF (NN .LT. 3) THEN - IER = -1 - RETURN - ENDIF -C -C Store the first triangle in the linked list. -C - IF ( .NOT. LEFT (X(1),Y(1),Z(1),X(2),Y(2),Z(2), - . X(3),Y(3),Z(3)) ) THEN -C -C The first triangle is (3,2,1) = (2,1,3) = (1,3,2). -C - LIST(1) = 3 - LPTR(1) = 2 - LIST(2) = -2 - LPTR(2) = 1 - LEND(1) = 2 -C - LIST(3) = 1 - LPTR(3) = 4 - LIST(4) = -3 - LPTR(4) = 3 - LEND(2) = 4 -C - LIST(5) = 2 - LPTR(5) = 6 - LIST(6) = -1 - LPTR(6) = 5 - LEND(3) = 6 -C - ELSEIF ( .NOT. LEFT(X(2),Y(2),Z(2),X(1),Y(1),Z(1), - . X(3),Y(3),Z(3)) ) - . THEN -C -C The first triangle is (1,2,3): 3 Strictly Left 1->2, -C i.e., node 3 lies in the left hemisphere defined by -C arc 1->2. -C - LIST(1) = 2 - LPTR(1) = 2 - LIST(2) = -3 - LPTR(2) = 1 - LEND(1) = 2 -C - LIST(3) = 3 - LPTR(3) = 4 - LIST(4) = -1 - LPTR(4) = 3 - LEND(2) = 4 -C - LIST(5) = 1 - LPTR(5) = 6 - LIST(6) = -2 - LPTR(6) = 5 - LEND(3) = 6 -C - ELSE -C -C The first three nodes are collinear. -C - IER = -2 - RETURN - ENDIF -C -C Initialize LNEW and test for N = 3. -C - LNEW = 7 - IF (NN .EQ. 
3) THEN - IER = 0 - RETURN - ENDIF -C -C A nearest-node data structure (NEAR, NEXT, and DIST) is -C used to obtain an expected-time (N*log(N)) incremental -C algorithm by enabling constant search time for locating -C each new node in the triangulation. -C -C For each unprocessed node K, NEAR(K) is the index of the -C triangulation node closest to K (used as the starting -C point for the search in Subroutine TRFIND) and DIST(K) -C is an increasing function of the arc length (angular -C distance) between nodes K and NEAR(K): -Cos(a) for arc -C length a. -C -C Since it is necessary to efficiently find the subset of -C unprocessed nodes associated with each triangulation -C node J (those that have J as their NEAR entries), the -C subsets are stored in NEAR and NEXT as follows: for -C each node J in the triangulation, I = NEAR(J) is the -C first unprocessed node in J's set (with I = 0 if the -C set is empty), L = NEXT(I) (if I > 0) is the second, -C NEXT(L) (if L > 0) is the third, etc. The nodes in each -C set are initially ordered by increasing indexes (which -C maximizes efficiency) but that ordering is not main- -C tained as the data structure is updated. -C -C Initialize the data structure for the single triangle. -C - NEAR(1) = 0 - NEAR(2) = 0 - NEAR(3) = 0 - DO 1 K = NN,4,-1 - D1 = -(X(K)*X(1) + Y(K)*Y(1) + Z(K)*Z(1)) - D2 = -(X(K)*X(2) + Y(K)*Y(2) + Z(K)*Z(2)) - D3 = -(X(K)*X(3) + Y(K)*Y(3) + Z(K)*Z(3)) - IF (D1 .LE. D2 .AND. D1 .LE. D3) THEN - NEAR(K) = 1 - DIST(K) = D1 - NEXT(K) = NEAR(1) - NEAR(1) = K - ELSEIF (D2 .LE. D1 .AND. D2 .LE. D3) THEN - NEAR(K) = 2 - DIST(K) = D2 - NEXT(K) = NEAR(2) - NEAR(2) = K - ELSE - NEAR(K) = 3 - DIST(K) = D3 - NEXT(K) = NEAR(3) - NEAR(3) = K - ENDIF - 1 CONTINUE -C -C Add the remaining nodes -C - DO 6 K = 4,NN - CALL ADDNOD (NEAR(K),K,X,Y,Z, LIST,LPTR,LEND, - . LNEW, IER) - IF (IER .NE. 0) RETURN -C -C Remove K from the set of unprocessed nodes associated -C with NEAR(K). -C - I = NEAR(K) - IF (NEAR(I) .EQ. K) THEN - NEAR(I) = NEXT(K) - ELSE - I = NEAR(I) - 2 I0 = I - I = NEXT(I0) - IF (I .NE. K) GO TO 2 - NEXT(I0) = NEXT(K) - ENDIF - NEAR(K) = 0 -C -C Loop on neighbors J of node K. -C - LPL = LEND(K) - LP = LPL - 3 LP = LPTR(LP) - J = ABS(LIST(LP)) -C -C Loop on elements I in the sequence of unprocessed nodes -C associated with J: K is a candidate for replacing J -C as the nearest triangulation node to I. The next value -C of I in the sequence, NEXT(I), must be saved before I -C is moved because it is altered by adding I to K's set. -C - I = NEAR(J) - 4 IF (I .EQ. 0) GO TO 5 - NEXTI = NEXT(I) -C -C Test for the distance from I to K less than the distance -C from I to J. -C - D = -(X(I)*X(K) + Y(I)*Y(K) + Z(I)*Z(K)) - IF (D .LT. DIST(I)) THEN -C -C Replace J by K as the nearest triangulation node to I: -C update NEAR(I) and DIST(I), and remove I from J's set -C of unprocessed nodes and add it to K's set. -C - NEAR(I) = K - DIST(I) = D - IF (I .EQ. NEAR(J)) THEN - NEAR(J) = NEXTI - ELSE - NEXT(I0) = NEXTI - ENDIF - NEXT(I) = NEAR(K) - NEAR(K) = I - ELSE - I0 = I - ENDIF -C -C Bottom of loop on I. -C - I = NEXTI - GO TO 4 -C -C Bottom of loop on neighbors J. -C - 5 IF (LP .NE. LPL) GO TO 3 - 6 CONTINUE - RETURN - END - SUBROUTINE TRPLOT (LUN,PLTSIZ,ELAT,ELON,A,N,X,Y,Z, - . LIST,LPTR,LEND,TITLE,NUMBR, IER) - CHARACTER*(*) TITLE - INTEGER LUN, N, LIST(*), LPTR(*), LEND(N), IER - LOGICAL NUMBR - REAL PLTSIZ, ELAT, ELON, A, X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. 
of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/16/98 -C -C This subroutine creates a level-2 Encapsulated Post- -C script (EPS) file containing a graphical display of a -C triangulation of a set of nodes on the unit sphere. The -C visible nodes are projected onto the plane that contains -C the origin and has normal defined by a user-specified eye- -C position. Projections of adjacent (visible) nodes are -C connected by line segments. -C -C -C On input: -C -C LUN = Logical unit number in the range 0 to 99. -C The unit should be opened with an appropriate -C file name before the call to this routine. -C -C PLTSIZ = Plot size in inches. A circular window in -C the projection plane is mapped to a circu- -C lar viewport with diameter equal to .88* -C PLTSIZ (leaving room for labels outside the -C viewport). The viewport is centered on the -C 8.5 by 11 inch page, and its boundary is -C drawn. 1.0 .LE. PLTSIZ .LE. 8.5. -C -C ELAT,ELON = Latitude and longitude (in degrees) of -C the center of projection E (the center -C of the plot). The projection plane is -C the plane that contains the origin and -C has E as unit normal. In a rotated -C coordinate system for which E is the -C north pole, the projection plane con- -C tains the equator, and only northern -C hemisphere nodes are visible (from the -C point at infinity in the direction E). -C These are projected orthogonally onto -C the projection plane (by zeroing the z- -C component in the rotated coordinate -C system). ELAT and ELON must be in the -C range -90 to 90 and -180 to 180, respec- -C tively. -C -C A = Angular distance in degrees from E to the boun- -C dary of a circular window against which the -C triangulation is clipped. The projected window -C is a disk of radius r = Sin(A) centered at the -C origin, and only visible nodes whose projections -C are within distance r of the origin are included -C in the plot. Thus, if A = 90, the plot includes -C the entire hemisphere centered at E. 0 .LT. A -C .LE. 90. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C TITLE = Type CHARACTER variable or constant contain- -C ing a string to be centered above the plot. -C The string must be enclosed in parentheses; -C i.e., the first and last characters must be -C '(' and ')', respectively, but these are not -C displayed. TITLE may have at most 80 char- -C acters including the parentheses. -C -C NUMBR = Option indicator: If NUMBR = TRUE, the -C nodal indexes are plotted next to the nodes. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LUN, PLTSIZ, or N is outside its -C valid range. -C IER = 2 if ELAT, ELON, or A is outside its -C valid range. -C IER = 3 if an error was encountered in writing -C to unit LUN. -C -C The values in the data statement below may be altered -C in order to modify various plotting options. -C -C Modules required by TRPLOT: None -C -C Intrinsic functions called by TRPLOT: ABS, ATAN, COS, -C NINT, REAL, SIN, -C SQRT -C -C*********************************************************** -C - INTEGER IPX1, IPX2, IPY1, IPY2, IR, LP, LPL, N0, N1 - LOGICAL ANNOT - REAL CF, CT, EX, EY, EZ, FSIZN, FSIZT, R11, R12, - . R21, R22, R23, SF, T, TX, TY, WR, WRS, X0, X1, - . 
Y0, Y1, Z0, Z1 -C - DATA ANNOT/.TRUE./, FSIZN/10.0/, FSIZT/16.0/ -C -C Local parameters: -C -C ANNOT = Logical variable with value TRUE iff the plot -C is to be annotated with the values of ELAT, -C ELON, and A -C CF = Conversion factor for degrees to radians -C CT = Cos(ELAT) -C EX,EY,EZ = Cartesian coordinates of the eye-position E -C FSIZN = Font size in points for labeling nodes with -C their indexes if NUMBR = TRUE -C FSIZT = Font size in points for the title (and -C annotation if ANNOT = TRUE) -C IPX1,IPY1 = X and y coordinates (in points) of the lower -C left corner of the bounding box or viewport -C box -C IPX2,IPY2 = X and y coordinates (in points) of the upper -C right corner of the bounding box or viewport -C box -C IR = Half the width (height) of the bounding box or -C viewport box in points -- viewport radius -C LP = LIST index (pointer) -C LPL = Pointer to the last neighbor of N0 -C N0 = Index of a node whose incident arcs are to be -C drawn -C N1 = Neighbor of N0 -C R11...R23 = Components of the first two rows of a rotation -C that maps E to the north pole (0,0,1) -C SF = Scale factor for mapping world coordinates -C (window coordinates in [-WR,WR] X [-WR,WR]) -C to viewport coordinates in [IPX1,IPX2] X -C [IPY1,IPY2] -C T = Temporary variable -C TX,TY = Translation vector for mapping world coordi- -C nates to viewport coordinates -C WR = Window radius r = Sin(A) -C WRS = WR**2 -C X0,Y0,Z0 = Coordinates of N0 in the rotated coordinate -C system or label location (X0,Y0) -C X1,Y1,Z1 = Coordinates of N1 in the rotated coordinate -C system or intersection of edge N0-N1 with -C the equator (in the rotated coordinate -C system) -C -C -C Test for invalid parameters. -C - IF (LUN .LT. 0 .OR. LUN .GT. 99 .OR. - . PLTSIZ .LT. 1.0 .OR. PLTSIZ .GT. 8.5 .OR. - . N .LT. 3) - . GO TO 11 - IF (ABS(ELAT) .GT. 90.0 .OR. ABS(ELON) .GT. 180.0 - . .OR. A .GT. 90.0) GO TO 12 -C -C Compute a conversion factor CF for degrees to radians -C and compute the window radius WR. -C - CF = ATAN(1.0)/45.0 - WR = SIN(CF*A) - WRS = WR*WR -C -C Compute the lower left (IPX1,IPY1) and upper right -C (IPX2,IPY2) corner coordinates of the bounding box. -C The coordinates, specified in default user space units -C (points, at 72 points/inch with origin at the lower -C left corner of the page), are chosen to preserve the -C square aspect ratio, and to center the plot on the 8.5 -C by 11 inch page. The center of the page is (306,396), -C and IR = PLTSIZ/2 in points. -C - IR = NINT(36.0*PLTSIZ) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Output header comments. -C - WRITE (LUN,100,ERR=13) IPX1, IPY1, IPX2, IPY2 - 100 FORMAT ('%!PS-Adobe-3.0 EPSF-3.0'/ - . '%%BoundingBox:',4I4/ - . '%%Title: Triangulation'/ - . '%%Creator: STRIPACK'/ - . '%%EndComments') -C -C Set (IPX1,IPY1) and (IPX2,IPY2) to the corner coordinates -C of a viewport box obtained by shrinking the bounding box -C by 12% in each dimension. -C - IR = NINT(0.88*REAL(IR)) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Set the line thickness to 2 points, and draw the -C viewport boundary. -C - T = 2.0 - WRITE (LUN,110,ERR=13) T - WRITE (LUN,120,ERR=13) IR - WRITE (LUN,130,ERR=13) - 110 FORMAT (F12.6,' setlinewidth') - 120 FORMAT ('306 396 ',I3,' 0 360 arc') - 130 FORMAT ('stroke') -C -C Set up an affine mapping from the window box [-WR,WR] X -C [-WR,WR] to the viewport box. 
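A Python sketch, not part of the STRIPACK source: the page geometry computed above in TRPLOT. The bounding box is centered on the 8.5 by 11 inch page at (306, 396) points, the viewport is that box shrunk by 12%, and the projected window radius is sin(A). The function name is ours.

    import math

    def eps_page_geometry(pltsiz, a_deg):
        """Bounding box, viewport radius, and projected window radius
        for the EPS output.

        pltsiz -- plot size in inches (1.0 <= pltsiz <= 8.5)
        a_deg  -- angular window radius A in degrees (0 < a_deg <= 90)
        """
        cf = math.atan(1.0) / 45.0          # degrees -> radians (pi/180)
        wr = math.sin(cf * a_deg)           # window radius r = sin(A)
        ir = round(36.0 * pltsiz)           # half box width in points (72 pt/inch)
        bbox = (306 - ir, 396 - ir, 306 + ir, 396 + ir)
        ir_viewport = round(0.88 * ir)      # bounding box shrunk by 12%
        return bbox, ir_viewport, wr

    # A 7.5 inch plot showing the full hemisphere (A = 90 degrees):
    print(eps_page_geometry(7.5, 90.0))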
-C - SF = REAL(IR)/WR - TX = IPX1 + SF*WR - TY = IPY1 + SF*WR - WRITE (LUN,140,ERR=13) TX, TY, SF, SF - 140 FORMAT (2F12.6,' translate'/ - . 2F12.6,' scale') -C -C The line thickness must be changed to reflect the new -C scaling which is applied to all subsequent output. -C Set it to 1.0 point. -C - T = 1.0/SF - WRITE (LUN,110,ERR=13) T -C -C Save the current graphics state, and set the clip path to -C the boundary of the window. -C - WRITE (LUN,150,ERR=13) - WRITE (LUN,160,ERR=13) WR - WRITE (LUN,170,ERR=13) - 150 FORMAT ('gsave') - 160 FORMAT ('0 0 ',F12.6,' 0 360 arc') - 170 FORMAT ('clip newpath') -C -C Compute the Cartesian coordinates of E and the components -C of a rotation R which maps E to the north pole (0,0,1). -C R is taken to be a rotation about the z-axis (into the -C yz-plane) followed by a rotation about the x-axis chosen -C so that the view-up direction is (0,0,1), or (-1,0,0) if -C E is the north or south pole. -C -C ( R11 R12 0 ) -C R = ( R21 R22 R23 ) -C ( EX EY EZ ) -C - T = CF*ELON - CT = COS(CF*ELAT) - EX = CT*COS(T) - EY = CT*SIN(T) - EZ = SIN(CF*ELAT) - IF (CT .NE. 0.0) THEN - R11 = -EY/CT - R12 = EX/CT - ELSE - R11 = 0.0 - R12 = 1.0 - ENDIF - R21 = -EZ*R12 - R22 = EZ*R11 - R23 = CT -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 3 N0 = 1,N - Z0 = EX*X(N0) + EY*Y(N0) + EZ*Z(N0) - IF (Z0 .LT. 0.) GO TO 3 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 3 - LPL = LEND(N0) - LP = LPL -C -C Loop on neighbors N1 of N0. LPL points to the last -C neighbor of N0. Copy the components of N1 into P. -C - 1 LP = LPTR(LP) - N1 = ABS(LIST(LP)) - X1 = R11*X(N1) + R12*Y(N1) - Y1 = R21*X(N1) + R22*Y(N1) + R23*Z(N1) - Z1 = EX*X(N1) + EY*Y(N1) + EZ*Z(N1) - IF (Z1 .LT. 0.) THEN -C -C N1 is a 'southern hemisphere' point. Move it to the -C intersection of edge N0-N1 with the equator so that -C the edge is clipped properly. Z1 is implicitly set -C to 0. -C - X1 = Z0*X1 - Z1*X0 - Y1 = Z0*Y1 - Z1*Y0 - T = SQRT(X1*X1+Y1*Y1) - X1 = X1/T - Y1 = Y1/T - ENDIF -C -C If node N1 is in the window and N1 < N0, bypass edge -C N0->N1 (since edge N1->N0 has already been drawn). -C - IF ( Z1 .GE. 0.0 .AND. X1*X1 + Y1*Y1 .LE. WRS - . .AND. N1 .LT. N0 ) GO TO 2 -C -C Add the edge to the path. -C - WRITE (LUN,180,ERR=13) X0, Y0, X1, Y1 - 180 FORMAT (2F12.6,' moveto',2F12.6,' lineto') -C -C Bottom of loops. -C - 2 IF (LP .NE. LPL) GO TO 1 - 3 CONTINUE -C -C Paint the path and restore the saved graphics state (with -C no clip path). -C - WRITE (LUN,130,ERR=13) - WRITE (LUN,190,ERR=13) - 190 FORMAT ('grestore') - IF (NUMBR) THEN -C -C Nodes in the window are to be labeled with their indexes. -C Convert FSIZN from points to world coordinates, and -C output the commands to select a font and scale it. -C - T = FSIZN/SF - WRITE (LUN,200,ERR=13) T - 200 FORMAT ('/Helvetica findfont'/ - . F12.6,' scalefont setfont') -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 4 N0 = 1,N - IF (EX*X(N0) + EY*Y(N0) + EZ*Z(N0) .LT. 0.) - . GO TO 4 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 4 -C -C Move to (X0,Y0) and draw the label N0. The first char- -C acter will will have its lower left corner about one -C character width to the right of the nodal position. 
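A Python sketch, not part of the STRIPACK source: the rotation and orthographic projection set up in the block above. It builds the matrix R shown in the comments (with our own names, not the Fortran interface) and applies it to a unit-vector node, giving plot coordinates plus a depth used for visibility.

    import numpy as np

    def eye_rotation(elat_deg, elon_deg):
        """Rotation R that carries the eye position E (latitude ELAT,
        longitude ELON, in degrees) to the north pole:

                ( R11  R12  0   )
            R = ( R21  R22  R23 )
                ( EX   EY   EZ  )

        Applying R to a unit-vector node gives (plot x, plot y, depth);
        nodes with negative depth lie in the hidden hemisphere."""
        cf = np.pi / 180.0
        ct = np.cos(cf * elat_deg)
        ex = ct * np.cos(cf * elon_deg)
        ey = ct * np.sin(cf * elon_deg)
        ez = np.sin(cf * elat_deg)
        if ct != 0.0:
            r11, r12 = -ey / ct, ex / ct
        else:                               # E at a pole: view-up is (-1, 0, 0)
            r11, r12 = 0.0, 1.0
        r21, r22, r23 = -ez * r12, ez * r11, ct
        return np.array([[r11, r12, 0.0],
                         [r21, r22, r23],
                         [ex, ey, ez]])

    # Example: project the north pole as seen from (ELAT, ELON) = (30, -100).
    x0, y0, z0 = eye_rotation(30.0, -100.0) @ np.array([0.0, 0.0, 1.0])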
-C - WRITE (LUN,210,ERR=13) X0, Y0 - 210 FORMAT (2F12.6,' moveto') - WRITE (LUN,220,ERR=13) N0 - 220 FORMAT ('(',I3,') show') - 4 CONTINUE - ENDIF -C -C Convert FSIZT from points to world coordinates, and output -C the commands to select a font and scale it. -C - T = FSIZT/SF - WRITE (LUN,200,ERR=13) T -C -C Display TITLE centered above the plot: -C -C Y0 = WR + 3.0*T -C WRITE (LUN,230,ERR=13) TITLE, Y0 -C 230 FORMAT (A80/' stringwidth pop 2 div neg ',F12.6, -C . ' moveto') -C WRITE (LUN,240,ERR=13) TITLE -C 240 FORMAT (A80/' show') -C IF (ANNOT) THEN -C -C Display the window center and radius below the plot. -C -C X0 = -WR -C Y0 = -WR - 50.0/SF -C WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,250,ERR=13) ELAT, ELON -C Y0 = Y0 - 2.0*T -C WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,260,ERR=13) A -C 250 FORMAT ('(Window center: ELAT = ',F7.2, -C . ', ELON = ',F8.2,') show') -C 260 FORMAT ('(Angular extent: A = ',F5.2,') show') -C ENDIF -C -C Paint the path and output the showpage command and -C end-of-file indicator. -C - WRITE (LUN,270,ERR=13) - 270 FORMAT ('stroke'/ - . 'showpage'/ - . '%%EOF') -C -C HP's interpreters require a one-byte End-of-PostScript-Job -C indicator (to eliminate a timeout error message): -C ASCII 4. -C - WRITE (LUN,280,ERR=13) CHAR(4) - 280 FORMAT (A1) -C -C No error encountered. -C - IER = 0 - RETURN -C -C Invalid input parameter LUN, PLTSIZ, or N. -C - 11 IER = 1 - RETURN -C -C Invalid input parameter ELAT, ELON, or A. -C - 12 IER = 2 - RETURN -C -C Error writing to unit LUN. -C - 13 IER = 3 - RETURN - END - SUBROUTINE TRPRNT (N,X,Y,Z,IFLAG,LIST,LPTR,LEND,LOUT) - INTEGER N, IFLAG, LIST(*), LPTR(*), LEND(N), LOUT - REAL X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/25/98 -C -C This subroutine prints the triangulation adjacency lists -C created by Subroutine TRMESH and, optionally, the nodal -C coordinates (either latitude and longitude or Cartesian -C coordinates) on logical unit LOUT. The list of neighbors -C of a boundary node is followed by index 0. The numbers of -C boundary nodes, triangles, and arcs are also printed. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3 -C and N .LE. 9999. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes if IFLAG = 0, or -C (X and Y only) arrays of length N containing -C longitude and latitude, respectively, if -C IFLAG > 0, or unused dummy parameters if -C IFLAG < 0. -C -C IFLAG = Nodal coordinate option indicator: -C IFLAG = 0 if X, Y, and Z (assumed to contain -C Cartesian coordinates) are to be -C printed (to 6 decimal places). -C IFLAG > 0 if only X and Y (assumed to con- -C tain longitude and latitude) are -C to be printed (to 6 decimal -C places). -C IFLAG < 0 if only the adjacency lists are to -C be printed. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C LOUT = Logical unit for output. If LOUT is not in -C the range 0 to 99, output is written to -C logical unit 6. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C The adjacency lists and nodal coordinates (as specified -C by IFLAG) are written to unit LOUT. -C -C Modules required by TRPRNT: None -C -C*********************************************************** -C - INTEGER I, INC, K, LP, LPL, LUN, NA, NABOR(400), NB, - . 
ND, NL, NLMAX, NMAX, NODE, NN, NT - DATA NMAX/9999/, NLMAX/58/ -C -C Local parameters: -C -C I = NABOR index (1 to K) -C INC = Increment for NL associated with an adjacency list -C K = Counter and number of neighbors of NODE -C LP = LIST pointer of a neighbor of NODE -C LPL = Pointer to the last neighbor of NODE -C LUN = Logical unit for output (copy of LOUT) -C NA = Number of arcs in the triangulation -C NABOR = Array containing the adjacency list associated -C with NODE, with zero appended if NODE is a -C boundary node -C NB = Number of boundary nodes encountered -C ND = Index of a neighbor of NODE (or negative index) -C NL = Number of lines that have been printed on the -C current page -C NLMAX = Maximum number of print lines per page (except -C for the last page which may have two addi- -C tional lines) -C NMAX = Upper bound on N (allows 4-digit indexes) -C NODE = Index of a node and DO-loop index (1 to N) -C NN = Local copy of N -C NT = Number of triangles in the triangulation -C - NN = N - LUN = LOUT - IF (LUN .LT. 0 .OR. LUN .GT. 99) LUN = 6 -C -C Print a heading and test the range of N. -C - WRITE (LUN,100) NN - IF (NN .LT. 3 .OR. NN .GT. NMAX) THEN -C -C N is outside its valid range. -C - WRITE (LUN,110) - RETURN - ENDIF -C -C Initialize NL (the number of lines printed on the current -C page) and NB (the number of boundary nodes encountered). -C - NL = 6 - NB = 0 - IF (IFLAG .LT. 0) THEN -C -C Print LIST only. K is the number of neighbors of NODE -C that have been stored in NABOR. -C - WRITE (LUN,101) - DO 2 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 1 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 1 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. Correct the sign of the last -C neighbor, add 0 to the end of the list, and increment -C NB. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print the list of neighbors. -C - INC = (K-1)/14 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,104) NODE, (NABOR(I), I = 1,K) - IF (K .NE. 14) WRITE (LUN,107) - 2 CONTINUE - ELSEIF (IFLAG .GT. 0) THEN -C -C Print X (longitude), Y (latitude), and LIST. -C - WRITE (LUN,102) - DO 4 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 3 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 3 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print X, Y, and NABOR. -C - INC = (K-1)/8 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,105) NODE, (NABOR(I), I = 1,K) -! WRITE (LUN,105) NODE, X(NODE), Y(NODE), -! . (NABOR(I), I = 1,K) - IF (K .NE. 8) WRITE (LUN,107) - 4 CONTINUE - ELSE -C -C Print X, Y, Z, and LIST. -C - WRITE (LUN,103) - DO 6 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 5 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 5 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print X, Y, Z, and NABOR. -C - INC = (K-1)/5 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,106) NODE, X(NODE), Y(NODE), - . Z(NODE), (NABOR(I), I = 1,K) - IF (K .NE. 5) WRITE (LUN,107) - 6 CONTINUE - ENDIF -C -C Print NB, NA, and NT (boundary nodes, arcs, and -C triangles). -C - IF (NB .NE. 
0) THEN - NA = 3*NN - NB - 3 - NT = 2*NN - NB - 2 - ELSE - NA = 3*NN - 6 - NT = 2*NN - 4 - ENDIF - WRITE (LUN,109) NB, NA, NT - RETURN -C -C Print formats: -C - 100 FORMAT (///15X,'STRIPACK Triangulation Data ', - . 'Structure, N = ',I5//) - 101 FORMAT (1X,'Node',31X,'Neighbors of Node'//) -! 102 FORMAT (1X,'Node',5X,'Longitude',6X,'Latitude', -! . 18X,'Neighbors of Node'//) - 102 FORMAT (1X,'Node', 8X,'Neighbors of Node'//) - 103 FORMAT (1X,'Node',5X,'X(Node)',8X,'Y(Node)',8X, - . 'Z(Node)',11X,'Neighbors of Node'//) - 104 FORMAT (1X,I4,4X,14I5/(1X,8X,14I5)) -! 105 FORMAT (1X,I4,2E15.6,4X,8I5/(1X,38X,8I5)) - 105 FORMAT (1X,I4,4X,8I5/(1X,38X,8I5)) - 106 FORMAT (1X,I4,3E15.6,4X,5I5/(1X,53X,5I5)) - 107 FORMAT (1X) - 108 FORMAT (///) - 109 FORMAT (/1X,'NB = ',I4,' Boundary Nodes',5X, - . 'NA = ',I5,' Arcs',5X,'NT = ',I5, - . ' Triangles') - 110 FORMAT (1X,10X,'*** N is outside its valid', - . ' range ***') - END - SUBROUTINE VRPLOT (LUN,PLTSIZ,ELAT,ELON,A,N,X,Y,Z, - . NT,LISTC,LPTR,LEND,XC,YC,ZC,TITLE, - . NUMBR, IER) - CHARACTER*(*) TITLE - INTEGER LUN, N, NT, LISTC(*), LPTR(*), LEND(N), IER - LOGICAL NUMBR - REAL PLTSIZ, ELAT, ELON, A, X(N), Y(N), Z(N), - . XC(NT), YC(NT), ZC(NT) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/16/98 -C -C This subroutine creates a level-2 Encapsulated Post- -C script (EPS) file containing a graphical depiction of a -C Voronoi diagram of a set of nodes on the unit sphere. -C The visible vertices are projected onto the plane that -C contains the origin and has normal defined by a user- -C specified eye-position. Projections of adjacent (visible) -C Voronoi vertices are connected by line segments. -C -C The parameters defining the Voronoi diagram may be com- -C puted by Subroutine CRLIST. -C -C -C On input: -C -C LUN = Logical unit number in the range 0 to 99. -C The unit should be opened with an appropriate -C file name before the call to this routine. -C -C PLTSIZ = Plot size in inches. A circular window in -C the projection plane is mapped to a circu- -C lar viewport with diameter equal to .88* -C PLTSIZ (leaving room for labels outside the -C viewport). The viewport is centered on the -C 8.5 by 11 inch page, and its boundary is -C drawn. 1.0 .LE. PLTSIZ .LE. 8.5. -C -C ELAT,ELON = Latitude and longitude (in degrees) of -C the center of projection E (the center -C of the plot). The projection plane is -C the plane that contains the origin and -C has E as unit normal. In a rotated -C coordinate system for which E is the -C north pole, the projection plane con- -C tains the equator, and only northern -C hemisphere points are visible (from the -C point at infinity in the direction E). -C These are projected orthogonally onto -C the projection plane (by zeroing the z- -C component in the rotated coordinate -C system). ELAT and ELON must be in the -C range -90 to 90 and -180 to 180, respec- -C tively. -C -C A = Angular distance in degrees from E to the boun- -C dary of a circular window against which the -C Voronoi diagram is clipped. The projected win- -C dow is a disk of radius r = Sin(A) centered at -C the origin, and only visible vertices whose -C projections are within distance r of the origin -C are included in the plot. Thus, if A = 90, the -C plot includes the entire hemisphere centered at -C E. 0 .LT. A .LE. 90. -C -C N = Number of nodes (Voronoi centers) and Voronoi -C regions. N .GE. 3. 
-C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C NT = Number of Voronoi region vertices (triangles, -C including those in the extended triangulation -C if the number of boundary nodes NB is nonzero): -C NT = 2*N-4. -C -C LISTC = Array of length 3*NT containing triangle -C indexes (indexes to XC, YC, and ZC) stored -C in 1-1 correspondence with LIST/LPTR entries -C (or entries that would be stored in LIST for -C the extended triangulation): the index of -C triangle (N1,N2,N3) is stored in LISTC(K), -C LISTC(L), and LISTC(M), where LIST(K), -C LIST(L), and LIST(M) are the indexes of N2 -C as a neighbor of N1, N3 as a neighbor of N2, -C and N1 as a neighbor of N3. The Voronoi -C region associated with a node is defined by -C the CCW-ordered sequence of circumcenters in -C one-to-one correspondence with its adjacency -C list (in the extended triangulation). -C -C LPTR = Array of length 3*NT = 6*N-12 containing a -C set of pointers (LISTC indexes) in one-to-one -C correspondence with the elements of LISTC. -C LISTC(LPTR(I)) indexes the triangle which -C follows LISTC(I) in cyclical counterclockwise -C order (the first neighbor follows the last -C neighbor). -C -C LEND = Array of length N containing a set of -C pointers to triangle lists. LP = LEND(K) -C points to a triangle (indexed by LISTC(LP)) -C containing node K for K = 1 to N. -C -C XC,YC,ZC = Arrays of length NT containing the -C Cartesian coordinates of the triangle -C circumcenters (Voronoi vertices). -C XC(I)**2 + YC(I)**2 + ZC(I)**2 = 1. -C -C TITLE = Type CHARACTER variable or constant contain- -C ing a string to be centered above the plot. -C The string must be enclosed in parentheses; -C i.e., the first and last characters must be -C '(' and ')', respectively, but these are not -C displayed. TITLE may have at most 80 char- -C acters including the parentheses. -C -C NUMBR = Option indicator: If NUMBR = TRUE, the -C nodal indexes are plotted at the Voronoi -C region centers. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LUN, PLTSIZ, N, or NT is outside -C its valid range. -C IER = 2 if ELAT, ELON, or A is outside its -C valid range. -C IER = 3 if an error was encountered in writing -C to unit LUN. -C -C Modules required by VRPLOT: None -C -C Intrinsic functions called by VRPLOT: ABS, ATAN, COS, -C NINT, REAL, SIN, -C SQRT -C -C*********************************************************** -C - INTEGER IPX1, IPX2, IPY1, IPY2, IR, KV1, KV2, LP, LPL, - . N0 - LOGICAL ANNOT, IN1, IN2 - REAL CF, CT, EX, EY, EZ, FSIZN, FSIZT, R11, R12, - . R21, R22, R23, SF, T, TX, TY, WR, WRS, X0, X1, - . 
X2, Y0, Y1, Y2, Z1, Z2 -C - DATA ANNOT/.TRUE./, FSIZN/10.0/, FSIZT/16.0/ -C -C Local parameters: -C -C ANNOT = Logical variable with value TRUE iff the plot -C is to be annotated with the values of ELAT, -C ELON, and A -C CF = Conversion factor for degrees to radians -C CT = Cos(ELAT) -C EX,EY,EZ = Cartesian coordinates of the eye-position E -C FSIZN = Font size in points for labeling nodes with -C their indexes if NUMBR = TRUE -C FSIZT = Font size in points for the title (and -C annotation if ANNOT = TRUE) -C IN1,IN2 = Logical variables with value TRUE iff the -C projections of vertices KV1 and KV2, respec- -C tively, are inside the window -C IPX1,IPY1 = X and y coordinates (in points) of the lower -C left corner of the bounding box or viewport -C box -C IPX2,IPY2 = X and y coordinates (in points) of the upper -C right corner of the bounding box or viewport -C box -C IR = Half the width (height) of the bounding box or -C viewport box in points -- viewport radius -C KV1,KV2 = Endpoint indexes of a Voronoi edge -C LP = LIST index (pointer) -C LPL = Pointer to the last neighbor of N0 -C N0 = Index of a node -C R11...R23 = Components of the first two rows of a rotation -C that maps E to the north pole (0,0,1) -C SF = Scale factor for mapping world coordinates -C (window coordinates in [-WR,WR] X [-WR,WR]) -C to viewport coordinates in [IPX1,IPX2] X -C [IPY1,IPY2] -C T = Temporary variable -C TX,TY = Translation vector for mapping world coordi- -C nates to viewport coordinates -C WR = Window radius r = Sin(A) -C WRS = WR**2 -C X0,Y0 = Projection plane coordinates of node N0 or -C label location -C X1,Y1,Z1 = Coordinates of vertex KV1 in the rotated -C coordinate system -C X2,Y2,Z2 = Coordinates of vertex KV2 in the rotated -C coordinate system or intersection of edge -C KV1-KV2 with the equator (in the rotated -C coordinate system) -C -C -C Test for invalid parameters. -C - IF (LUN .LT. 0 .OR. LUN .GT. 99 .OR. - . PLTSIZ .LT. 1.0 .OR. PLTSIZ .GT. 8.5 .OR. - . N .LT. 3 .OR. NT .NE. 2*N-4) - . GO TO 11 - IF (ABS(ELAT) .GT. 90.0 .OR. ABS(ELON) .GT. 180.0 - . .OR. A .GT. 90.0) GO TO 12 -C -C Compute a conversion factor CF for degrees to radians -C and compute the window radius WR. -C - CF = ATAN(1.0)/45.0 - WR = SIN(CF*A) - WRS = WR*WR -C -C Compute the lower left (IPX1,IPY1) and upper right -C (IPX2,IPY2) corner coordinates of the bounding box. -C The coordinates, specified in default user space units -C (points, at 72 points/inch with origin at the lower -C left corner of the page), are chosen to preserve the -C square aspect ratio, and to center the plot on the 8.5 -C by 11 inch page. The center of the page is (306,396), -C and IR = PLTSIZ/2 in points. -C - IR = NINT(36.0*PLTSIZ) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Output header comments. -C - WRITE (LUN,100,ERR=13) IPX1, IPY1, IPX2, IPY2 - 100 FORMAT ('%!PS-Adobe-3.0 EPSF-3.0'/ - . '%%BoundingBox:',4I4/ - . '%%Title: Voronoi diagram'/ - . '%%Creator: STRIPACK'/ - . '%%EndComments') -C -C Set (IPX1,IPY1) and (IPX2,IPY2) to the corner coordinates -C of a viewport box obtained by shrinking the bounding box -C by 12% in each dimension. -C - IR = NINT(0.88*REAL(IR)) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Set the line thickness to 2 points, and draw the -C viewport boundary. 
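A Python sketch, not part of the STRIPACK source: given the LISTC/LPTR/LEND layout documented above, the counterclockwise ring of Voronoi vertices around a node can be collected with the same pointer walk VRPLOT uses over its Voronoi centers. This assumes 1-based Python lists that mirror the Fortran arrays (index 0 unused); the function name is ours.

    def voronoi_region(k, listc, lptr, lend):
        """Indices of the circumcenters (Voronoi vertices) bounding the
        region of node k: start at LEND(k) and follow LPTR until the
        pointer wraps around to its starting position."""
        lpl = lend[k]
        lp = lpl
        region = []
        while True:
            lp = lptr[lp]
            region.append(listc[lp])
            if lp == lpl:
                break
        return region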
-C - T = 2.0 - WRITE (LUN,110,ERR=13) T - WRITE (LUN,120,ERR=13) IR - WRITE (LUN,130,ERR=13) - 110 FORMAT (F12.6,' setlinewidth') - 120 FORMAT ('306 396 ',I3,' 0 360 arc') - 130 FORMAT ('stroke') -C -C Set up an affine mapping from the window box [-WR,WR] X -C [-WR,WR] to the viewport box. -C - SF = REAL(IR)/WR - TX = IPX1 + SF*WR - TY = IPY1 + SF*WR - WRITE (LUN,140,ERR=13) TX, TY, SF, SF - 140 FORMAT (2F12.6,' translate'/ - . 2F12.6,' scale') -C -C The line thickness must be changed to reflect the new -C scaling which is applied to all subsequent output. -C Set it to 1.0 point. -C - T = 1.0/SF - WRITE (LUN,110,ERR=13) T -C -C Save the current graphics state, and set the clip path to -C the boundary of the window. -C - WRITE (LUN,150,ERR=13) - WRITE (LUN,160,ERR=13) WR - WRITE (LUN,170,ERR=13) - 150 FORMAT ('gsave') - 160 FORMAT ('0 0 ',F12.6,' 0 360 arc') - 170 FORMAT ('clip newpath') -C -C Compute the Cartesian coordinates of E and the components -C of a rotation R which maps E to the north pole (0,0,1). -C R is taken to be a rotation about the z-axis (into the -C yz-plane) followed by a rotation about the x-axis chosen -C so that the view-up direction is (0,0,1), or (-1,0,0) if -C E is the north or south pole. -C -C ( R11 R12 0 ) -C R = ( R21 R22 R23 ) -C ( EX EY EZ ) -C - T = CF*ELON - CT = COS(CF*ELAT) - EX = CT*COS(T) - EY = CT*SIN(T) - EZ = SIN(CF*ELAT) - IF (CT .NE. 0.0) THEN - R11 = -EY/CT - R12 = EX/CT - ELSE - R11 = 0.0 - R12 = 1.0 - ENDIF - R21 = -EZ*R12 - R22 = EZ*R11 - R23 = CT -C -C Loop on nodes (Voronoi centers) N0. -C LPL indexes the last neighbor of N0. -C - DO 3 N0 = 1,N - LPL = LEND(N0) -C -C Set KV2 to the first (and last) vertex index and compute -C its coordinates (X2,Y2,Z2) in the rotated coordinate -C system. -C - KV2 = LISTC(LPL) - X2 = R11*XC(KV2) + R12*YC(KV2) - Y2 = R21*XC(KV2) + R22*YC(KV2) + R23*ZC(KV2) - Z2 = EX*XC(KV2) + EY*YC(KV2) + EZ*ZC(KV2) -C -C IN2 = TRUE iff KV2 is in the window. -C - IN2 = Z2 .GE. 0. .AND. X2*X2 + Y2*Y2 .LE. WRS -C -C Loop on neighbors N1 of N0. For each triangulation edge -C N0-N1, KV1-KV2 is the corresponding Voronoi edge. -C - LP = LPL - 1 LP = LPTR(LP) - KV1 = KV2 - X1 = X2 - Y1 = Y2 - Z1 = Z2 - IN1 = IN2 - KV2 = LISTC(LP) -C -C Compute the new values of (X2,Y2,Z2) and IN2. -C - X2 = R11*XC(KV2) + R12*YC(KV2) - Y2 = R21*XC(KV2) + R22*YC(KV2) + R23*ZC(KV2) - Z2 = EX*XC(KV2) + EY*YC(KV2) + EZ*ZC(KV2) - IN2 = Z2 .GE. 0. .AND. X2*X2 + Y2*Y2 .LE. WRS -C -C Add edge KV1-KV2 to the path iff both endpoints are inside -C the window and KV2 > KV1, or KV1 is inside and KV2 is -C outside (so that the edge is drawn only once). -C - IF (.NOT. IN1 .OR. (IN2 .AND. KV2 .LE. KV1)) - . GO TO 2 - IF (Z2 .LT. 0.) THEN -C -C KV2 is a 'southern hemisphere' point. Move it to the -C intersection of edge KV1-KV2 with the equator so that -C the edge is clipped properly. Z2 is implicitly set -C to 0. -C - X2 = Z1*X2 - Z2*X1 - Y2 = Z1*Y2 - Z2*Y1 - T = SQRT(X2*X2+Y2*Y2) - X2 = X2/T - Y2 = Y2/T - ENDIF - WRITE (LUN,180,ERR=13) X1, Y1, X2, Y2 - 180 FORMAT (2F12.6,' moveto',2F12.6,' lineto') -C -C Bottom of loops. -C - 2 IF (LP .NE. LPL) GO TO 1 - 3 CONTINUE -C -C Paint the path and restore the saved graphics state (with -C no clip path). -C - WRITE (LUN,130,ERR=13) - WRITE (LUN,190,ERR=13) - 190 FORMAT ('grestore') - IF (NUMBR) THEN -C -C Nodes in the window are to be labeled with their indexes. -C Convert FSIZN from points to world coordinates, and -C output the commands to select a font and scale it. 
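A Python sketch, not part of the STRIPACK source: both TRPLOT and VRPLOT clip an edge whose far endpoint falls in the hidden hemisphere by sliding that endpoint to the equator of the rotated frame, as in the edge loop above. The names below are ours.

    import numpy as np

    def clip_to_equator(p_visible, p_hidden):
        """Move the hidden endpoint of an edge to the equator of the
        rotated frame so the edge is clipped correctly before it is
        written to the path.

        p_visible -- (x, y, z) of the endpoint with depth z >= 0
        p_hidden  -- (x, y, z) of the endpoint with depth z <  0
        Returns the (x, y) point where the edge crosses z = 0."""
        x1, y1, z1 = p_visible
        x2, y2, z2 = p_hidden
        x = z1 * x2 - z2 * x1
        y = z1 * y2 - z2 * y1
        t = float(np.hypot(x, y))
        return x / t, y / t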
-C - T = FSIZN/SF - WRITE (LUN,200,ERR=13) T - 200 FORMAT ('/Helvetica findfont'/ - . F12.6,' scalefont setfont') -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 4 N0 = 1,N - IF (EX*X(N0) + EY*Y(N0) + EZ*Z(N0) .LT. 0.) - . GO TO 4 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 4 -C -C Move to (X0,Y0), and draw the label N0 with the origin -C of the first character at (X0,Y0). -C - WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,220,ERR=13) - WRITE (LUN,220,ERR=13) N0 - 210 FORMAT (2F12.6,' moveto') -C 220 FORMAT ('(','.',') show') - 220 FORMAT ('(',I3,') show') - 4 CONTINUE - ENDIF -C -C Convert FSIZT from points to world coordinates, and output -C the commands to select a font and scale it. -C - T = FSIZT/SF - WRITE (LUN,200,ERR=13) T -C -C Display TITLE centered above the plot: -C -C Y0 = WR + 3.0*T -C WRITE (LUN,230,ERR=13) TITLE, Y0 -C 230 FORMAT (A80/' stringwidth pop 2 div neg ',F12.6, -C . ' moveto') -C WRITE (LUN,240,ERR=13) TITLE -C 240 FORMAT (A80/' show') -C IF (ANNOT) THEN -C -C Display the window center and radius below the plot. -C -C X0 = -WR -C Y0 = -WR - 50.0/SF -C WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,250,ERR=13) ELAT, ELON -C Y0 = Y0 - 2.0*T -C WRITE (LUN,210,ERR=13) X0, Y0 -C WRITE (LUN,260,ERR=13) A -C 250 FORMAT ('(Window center: ELAT = ',F7.2, -C . ', ELON = ',F8.2,') show') -C 260 FORMAT ('(Angular extent: A = ',F5.2,') show') -C ENDIF -C -C Paint the path and output the showpage command and -C end-of-file indicator. -C - WRITE (LUN,270,ERR=13) - 270 FORMAT ('stroke'/ - . 'showpage'/ - . '%%EOF') -C -C HP's interpreters require a one-byte End-of-PostScript-Job -C indicator (to eliminate a timeout error message): -C ASCII 4. -C - WRITE (LUN,280,ERR=13) CHAR(4) - 280 FORMAT (A1) -C -C No error encountered. -C - IER = 0 - RETURN -C -C Invalid input parameter LUN, PLTSIZ, N, or NT. -C - 11 IER = 1 - RETURN -C -C Invalid input parameter ELAT, ELON, or A. -C - 12 IER = 2 - RETURN -C -C Error writing to unit LUN. -C - 13 IER = 3 - RETURN - END diff --git a/grid_gen/global_scvt/runit.csh b/grid_gen/global_scvt/runit.csh deleted file mode 100644 index 80f1df522..000000000 --- a/grid_gen/global_scvt/runit.csh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/csh -rm -f locs.dat* -cp centroids.162.dat locs.dat - -make clean -make - -setenv NAME x1 - -foreach RES (162 642 2562 10242 40962) -#foreach RES (162 642 2562 10242 40962 163842) - -setenv RES 162 -echo "&domains" > namelist.input -echo " np = "$RES"" >> namelist.input -echo " locs_as_xyz = .true." 
>> namelist.input -echo " n_scvt_iterations = 10000" >> namelist.input -cat convergence >> namelist.input -echo "/" >> namelist.input -grid_gen -grid_ref -mv -f grid.nc grid.$NAME.$RES.nc -mv -f locs.dat.out locs.$NAME.$RES.dat -mv -f locs.dat.out.refined locs.dat - -end diff --git a/grid_gen/global_scvt/src/Makefile b/grid_gen/global_scvt/src/Makefile deleted file mode 100644 index 9f44d4d9e..000000000 --- a/grid_gen/global_scvt/src/Makefile +++ /dev/null @@ -1,39 +0,0 @@ -.SUFFIXES: .F .f .o - -OBJS = STRIPACK.o module_grid_params.o module_grid_constants.o module_data_types.o module_sphere_utilities.o module_voronoi_utils.o module_grid_gen_utils.o module_scvt.o module_write_netcdf.o module_grid_meta.o grid_gen.o - -all: $(OBJS) - $(FC) $(PROMOTION) $(LDFLAGS) -o grid_gen $(OBJS) -L$(NETCDF)/lib -lnetcdff -lnetcdf - - -grid_gen.o: module_grid_params.o module_grid_constants.o module_data_types.o module_grid_gen_utils.o module_voronoi_utils.o STRIPACK.o module_scvt.o module_grid_meta.o - -module_grid_gen_utils.o: module_sphere_utilities.o - -module_scvt.o: module_data_types.o module_sphere_utilities.o module_voronoi_utils.o module_grid_constants.o module_grid_params.o - -module_write_netcdf.o: module_grid_params.o - -module_data_types.o: - -module_grid_meta.o: module_data_types.o module_grid_constants.o module_sphere_utilities.o module_write_netcdf.o - -module_sphere_utilities.o: module_data_types.o - -module_grid_constants.o: - -module_grid_params.o: - -module_voronoi_utils.o: module_grid_constants.o STRIPACK.o - - -.F.o: - cpp -C -P -traditional $(CPPFLAGS) $< > $*.f90 - $(FC) $(FFLAGS) $(PROMOTION) -c $*.f90 -I$(NETCDF)/include - rm -f $*.f90 - -.f.o: - $(FC) $(F77FLAGS) $(PROMOTION) -c $< - -clean: - rm -f *.o *.mod grid_gen diff --git a/grid_gen/global_scvt/src/STRIPACK.f b/grid_gen/global_scvt/src/STRIPACK.f deleted file mode 100644 index 968c818a5..000000000 --- a/grid_gen/global_scvt/src/STRIPACK.f +++ /dev/null @@ -1,6706 +0,0 @@ - MODULE STRIPACK - - CONTAINS - - SUBROUTINE ADDNOD (NST,K,X,Y,Z, LIST,LPTR,LEND, - . LNEW, IER) - INTEGER NST, K, LIST(*), LPTR(*), LEND(K), LNEW, IER - REAL X(K), Y(K), Z(K) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/08/99 -C -C This subroutine adds node K to a triangulation of the -C convex hull of nodes 1,...,K-1, producing a triangulation -C of the convex hull of nodes 1,...,K. -C -C The algorithm consists of the following steps: node K -C is located relative to the triangulation (TRFIND), its -C index is added to the data structure (INTADD or BDYADD), -C and a sequence of swaps (SWPTST and SWAP) are applied to -C the arcs opposite K so that all arcs incident on node K -C and opposite node K are locally optimal (satisfy the cir- -C cumcircle test). Thus, if a Delaunay triangulation is -C input, a Delaunay triangulation will result. -C -C -C On input: -C -C NST = Index of a node at which TRFIND begins its -C search. Search time depends on the proximity -C of this node to K. If NST < 1, the search is -C begun at node K-1. -C -C K = Nodal index (index for X, Y, Z, and LEND) of the -C new node to be added. K .GE. 4. -C -C X,Y,Z = Arrays of length .GE. K containing Car- -C tesian coordinates of the nodes. -C (X(I),Y(I),Z(I)) defines node I for -C I = 1,...,K. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure associated with -C the triangulation of nodes 1 -C to K-1. 
The array lengths are -C assumed to be large enough to -C add node K. Refer to Subrou- -C tine TRMESH. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node K as the -C last entry unless IER .NE. 0 -C and IER .NE. -3, in which case -C the arrays are not altered. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = -1 if K is outside its valid range -C on input. -C IER = -2 if all nodes (including K) are col- -C linear (lie on a common geodesic). -C IER = L if nodes L and K coincide for some -C L < K. -C -C Modules required by ADDNOD: BDYADD, COVSPH, INSERT, -C INTADD, JRAND, LSTPTR, -C STORE, SWAP, SWPTST, -C TRFIND -C -C Intrinsic function called by ADDNOD: ABS -C -C*********************************************************** -C - INTEGER I1, I2, I3, IO1, IO2, IN1, IST, KK, KM1, L, - . LP, LPF, LPO1, LPO1S - REAL B1, B2, B3, P(3) -C -C Local parameters: -C -C B1,B2,B3 = Unnormalized barycentric coordinates returned -C by TRFIND. -C I1,I2,I3 = Vertex indexes of a triangle containing K -C IN1 = Vertex opposite K: first neighbor of IO2 -C that precedes IO1. IN1,IO1,IO2 are in -C counterclockwise order. -C IO1,IO2 = Adjacent neighbors of K defining an arc to -C be tested for a swap -C IST = Index of node at which TRFIND begins its search -C KK = Local copy of K -C KM1 = K-1 -C L = Vertex index (I1, I2, or I3) returned in IER -C if node K coincides with a vertex -C LP = LIST pointer -C LPF = LIST pointer to the first neighbor of K -C LPO1 = LIST pointer to IO1 -C LPO1S = Saved value of LPO1 -C P = Cartesian coordinates of node K -C - KK = K - IF (KK .LT. 4) GO TO 3 -C -C Initialization: -C - KM1 = KK - 1 - IST = NST - IF (IST .LT. 1) IST = KM1 - P(1) = X(KK) - P(2) = Y(KK) - P(3) = Z(KK) -C -C Find a triangle (I1,I2,I3) containing K or the rightmost -C (I1) and leftmost (I2) visible boundary nodes as viewed -C from node K. -C - CALL TRFIND (IST,P,KM1,X,Y,Z,LIST,LPTR,LEND, B1,B2,B3, - . I1,I2,I3) -C -C Test for collinear or duplicate nodes. -C - IF (I1 .EQ. 0) GO TO 4 - IF (I3 .NE. 0) THEN - L = I1 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - L = I2 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - L = I3 - IF (P(1) .EQ. X(L) .AND. P(2) .EQ. Y(L) .AND. - . P(3) .EQ. Z(L)) GO TO 5 - CALL INTADD (KK,I1,I2,I3, LIST,LPTR,LEND,LNEW ) - ELSE - IF (I1 .NE. I2) THEN - CALL BDYADD (KK,I1,I2, LIST,LPTR,LEND,LNEW ) - ELSE - CALL COVSPH (KK,I1, LIST,LPTR,LEND,LNEW ) - ENDIF - ENDIF - IER = 0 -C -C Initialize variables for optimization of the -C triangulation. -C - LP = LEND(KK) - LPF = LPTR(LP) - IO2 = LIST(LPF) - LPO1 = LPTR(LPF) - IO1 = ABS(LIST(LPO1)) -C -C Begin loop: find the node opposite K. -C - 1 LP = LSTPTR(LEND(IO1),IO2,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 2 - LP = LPTR(LP) - IN1 = ABS(LIST(LP)) -C -C Swap test: if a swap occurs, two new arcs are -C opposite K and must be tested. -C - LPO1S = LPO1 - IF ( .NOT. SWPTST(IN1,KK,IO1,IO2,X,Y,Z) ) GO TO 2 - CALL SWAP (IN1,KK,IO1,IO2, LIST,LPTR,LEND, LPO1) - IF (LPO1 .EQ. 0) THEN -C -C A swap is not possible because KK and IN1 are already -C adjacent. This error in SWPTST only occurs in the -C neutral case and when there are nearly duplicate -C nodes. -C - LPO1 = LPO1S - GO TO 2 - ENDIF - IO1 = IN1 - GO TO 1 -C -C No swap occurred. Test for termination and reset -C IO2 and IO1. -C - 2 IF (LPO1 .EQ. LPF .OR. LIST(LPO1) .LT. 0) RETURN - IO2 = IO1 - LPO1 = LPTR(LPO1) - IO1 = ABS(LIST(LPO1)) - GO TO 1 -C -C KK < 4. 
-C - 3 IER = -1 - RETURN -C -C All nodes are collinear. -C - 4 IER = -2 - RETURN -C -C Nodes L and K coincide. -C - 5 IER = L - RETURN - END SUBROUTINE - REAL FUNCTION AREAS (V1,V2,V3) - REAL V1(3), V2(3), V3(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 09/18/90 -C -C This function returns the area of a spherical triangle -C on the unit sphere. -C -C -C On input: -C -C V1,V2,V3 = Arrays of length 3 containing the Carte- -C sian coordinates of unit vectors (the -C three triangle vertices in any order). -C These vectors, if nonzero, are implicitly -C scaled to have length 1. -C -C Input parameters are not altered by this function. -C -C On output: -C -C AREAS = Area of the spherical triangle defined by -C V1, V2, and V3 in the range 0 to 2*PI (the -C area of a hemisphere). AREAS = 0 (or 2*PI) -C if and only if V1, V2, and V3 lie in (or -C close to) a plane containing the origin. -C -C Modules required by AREAS: None -C -C Intrinsic functions called by AREAS: ACOS, DBLE, REAL, -C SQRT -C -C*********************************************************** -C - DOUBLE PRECISION A1, A2, A3, CA1, CA2, CA3, DV1(3), - . DV2(3), DV3(3), S12, S23, S31, - . U12(3), U23(3), U31(3) - INTEGER I -C -C Local parameters: -C -C A1,A2,A3 = Interior angles of the spherical triangle -C CA1,CA2,CA3 = cos(A1), cos(A2), and cos(A3), respectively -C DV1,DV2,DV3 = Double Precision copies of V1, V2, and V3 -C I = DO-loop index and index for Uij -C S12,S23,S31 = Sum of squared components of U12, U23, U31 -C U12,U23,U31 = Unit normal vectors to the planes defined by -C pairs of triangle vertices -C - DO 1 I = 1,3 - DV1(I) = DBLE(V1(I)) - DV2(I) = DBLE(V2(I)) - DV3(I) = DBLE(V3(I)) - 1 CONTINUE -C -C Compute cross products Uij = Vi X Vj. -C - U12(1) = DV1(2)*DV2(3) - DV1(3)*DV2(2) - U12(2) = DV1(3)*DV2(1) - DV1(1)*DV2(3) - U12(3) = DV1(1)*DV2(2) - DV1(2)*DV2(1) -C - U23(1) = DV2(2)*DV3(3) - DV2(3)*DV3(2) - U23(2) = DV2(3)*DV3(1) - DV2(1)*DV3(3) - U23(3) = DV2(1)*DV3(2) - DV2(2)*DV3(1) -C - U31(1) = DV3(2)*DV1(3) - DV3(3)*DV1(2) - U31(2) = DV3(3)*DV1(1) - DV3(1)*DV1(3) - U31(3) = DV3(1)*DV1(2) - DV3(2)*DV1(1) -C -C Normalize Uij to unit vectors. -C - S12 = 0.D0 - S23 = 0.D0 - S31 = 0.D0 - DO 2 I = 1,3 - S12 = S12 + U12(I)*U12(I) - S23 = S23 + U23(I)*U23(I) - S31 = S31 + U31(I)*U31(I) - 2 CONTINUE -C -C Test for a degenerate triangle associated with collinear -C vertices. -C - IF (S12 .EQ. 0.D0 .OR. S23 .EQ. 0.D0 .OR. - . S31 .EQ. 0.D0) THEN - AREAS = 0. - RETURN - ENDIF - S12 = SQRT(S12) - S23 = SQRT(S23) - S31 = SQRT(S31) - DO 3 I = 1,3 - U12(I) = U12(I)/S12 - U23(I) = U23(I)/S23 - U31(I) = U31(I)/S31 - 3 CONTINUE -C -C Compute interior angles Ai as the dihedral angles between -C planes: -C CA1 = cos(A1) = - -C CA2 = cos(A2) = - -C CA3 = cos(A3) = - -C - CA1 = -U12(1)*U31(1)-U12(2)*U31(2)-U12(3)*U31(3) - CA2 = -U23(1)*U12(1)-U23(2)*U12(2)-U23(3)*U12(3) - CA3 = -U31(1)*U23(1)-U31(2)*U23(2)-U31(3)*U23(3) - IF (CA1 .LT. -1.D0) CA1 = -1.D0 - IF (CA1 .GT. 1.D0) CA1 = 1.D0 - IF (CA2 .LT. -1.D0) CA2 = -1.D0 - IF (CA2 .GT. 1.D0) CA2 = 1.D0 - IF (CA3 .LT. -1.D0) CA3 = -1.D0 - IF (CA3 .GT. 1.D0) CA3 = 1.D0 - A1 = ACOS(CA1) - A2 = ACOS(CA2) - A3 = ACOS(CA3) -C -C Compute AREAS = A1 + A2 + A3 - PI. -C - AREAS = REAL(A1 + A2 + A3 - ACOS(-1.D0)) - IF (AREAS .LT. 0.) AREAS = 0. 
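The spherical-excess formula that AREAS implements (area = A1 + A2 + A3 - pi, with each interior angle Ai taken as the dihedral angle between the planes through the origin and a pair of vertices) is compact enough to restate as a short illustration. The Python sketch below is not part of this source tree; the function names are hypothetical, and a non-degenerate triangle with unit-vector vertices is assumed.

import math

def cross(a, b):
    # Cross product of two 3-vectors.
    return (a[1]*b[2] - a[2]*b[1],
            a[2]*b[0] - a[0]*b[2],
            a[0]*b[1] - a[1]*b[0])

def dot(a, b):
    return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]

def unit(v):
    s = math.sqrt(dot(v, v))
    return (v[0]/s, v[1]/s, v[2]/s)

def spherical_triangle_area(v1, v2, v3):
    # Spherical excess A1 + A2 + A3 - pi, where each Ai is the dihedral
    # angle between planes spanned by adjacent vertex pairs (the negated
    # Uij . Uki dot products computed in AREAS above).
    u12, u23, u31 = unit(cross(v1, v2)), unit(cross(v2, v3)), unit(cross(v3, v1))
    a1 = math.acos(max(-1.0, min(1.0, -dot(u12, u31))))
    a2 = math.acos(max(-1.0, min(1.0, -dot(u23, u12))))
    a3 = math.acos(max(-1.0, min(1.0, -dot(u31, u23))))
    return max(0.0, a1 + a2 + a3 - math.pi)

# One octant of the unit sphere has area 4*pi/8 = pi/2:
print(spherical_triangle_area((1, 0, 0), (0, 1, 0), (0, 0, 1)))  # ~1.5708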
- RETURN - END FUNCTION - SUBROUTINE BDYADD (KK,I1,I2, LIST,LPTR,LEND,LNEW ) - INTEGER KK, I1, I2, LIST(*), LPTR(*), LEND(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/11/96 -C -C This subroutine adds a boundary node to a triangulation -C of a set of KK-1 points on the unit sphere. The data -C structure is updated with the insertion of node KK, but no -C optimization is performed. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C KK = Index of a node to be connected to the sequence -C of all visible boundary nodes. KK .GE. 1 and -C KK must not be equal to I1 or I2. -C -C I1 = First (rightmost as viewed from KK) boundary -C node in the triangulation that is visible from -C node KK (the line segment KK-I1 intersects no -C arcs. -C -C I2 = Last (leftmost) boundary node that is visible -C from node KK. I1 and I2 may be determined by -C Subroutine TRFIND. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C Nodes I1 and I2 must be in- -C cluded in the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK. Node -C KK is connected to I1, I2, and -C all boundary nodes in between. -C -C Module required by BDYADD: INSERT -C -C*********************************************************** -C - INTEGER K, LP, LSAV, N1, N2, NEXT, NSAV -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C LSAV = LIST pointer -C N1,N2 = Local copies of I1 and I2, respectively -C NEXT = Boundary node visible from K -C NSAV = Boundary node visible from K -C - K = KK - N1 = I1 - N2 = I2 -C -C Add K as the last neighbor of N1. -C - LP = LEND(N1) - LSAV = LPTR(LP) - LPTR(LP) = LNEW - LIST(LNEW) = -K - LPTR(LNEW) = LSAV - LEND(N1) = LNEW - LNEW = LNEW + 1 - NEXT = -LIST(LP) - LIST(LP) = NEXT - NSAV = NEXT -C -C Loop on the remaining boundary nodes between N1 and N2, -C adding K as the first neighbor. -C - 1 LP = LEND(NEXT) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - IF (NEXT .EQ. N2) GO TO 2 - NEXT = -LIST(LP) - LIST(LP) = NEXT - GO TO 1 -C -C Add the boundary nodes between N1 and N2 as neighbors -C of node K. -C - 2 LSAV = LNEW - LIST(LNEW) = N1 - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - NEXT = NSAV -C - 3 IF (NEXT .EQ. N2) GO TO 4 - LIST(LNEW) = NEXT - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - LP = LEND(NEXT) - NEXT = LIST(LP) - GO TO 3 -C - 4 LIST(LNEW) = -N2 - LPTR(LNEW) = LSAV - LEND(K) = LNEW - LNEW = LNEW + 1 - RETURN - END SUBROUTINE - SUBROUTINE BNODES (N,LIST,LPTR,LEND, NODES,NB,NA,NT) - INTEGER N, LIST(*), LPTR(*), LEND(N), NODES(*), NB, - . NA, NT -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/26/96 -C -C Given a triangulation of N nodes on the unit sphere -C created by Subroutine TRMESH, this subroutine returns an -C array containing the indexes (if any) of the counterclock- -C wise-ordered sequence of boundary nodes -- the nodes on -C the boundary of the convex hull of the set of nodes. (The -C boundary is empty if the nodes do not lie in a single -C hemisphere.) The numbers of boundary nodes, arcs, and -C triangles are also returned. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. 
-C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C The above parameters are not altered by this routine. -C -C NODES = Integer array of length at least NB -C (NB .LE. N). -C -C On output: -C -C NODES = Ordered sequence of boundary node indexes -C in the range 1 to N (in the first NB loca- -C tions). -C -C NB = Number of boundary nodes. -C -C NA,NT = Number of arcs and triangles, respectively, -C in the triangulation. -C -C Modules required by BNODES: None -C -C*********************************************************** -C - INTEGER K, LP, N0, NN, NST -C -C Local parameters: -C -C K = NODES index -C LP = LIST pointer -C N0 = Boundary node to be added to NODES -C NN = Local copy of N -C NST = First element of nodes (arbitrarily chosen to be -C the one with smallest index) -C - NN = N -C -C Search for a boundary node. -C - DO 1 NST = 1,NN - LP = LEND(NST) - IF (LIST(LP) .LT. 0) GO TO 2 - 1 CONTINUE -C -C The triangulation contains no boundary nodes. -C - NB = 0 - NA = 3*(NN-2) - NT = 2*(NN-2) - RETURN -C -C NST is the first boundary node encountered. Initialize -C for traversal of the boundary. -C - 2 NODES(1) = NST - K = 1 - N0 = NST -C -C Traverse the boundary in counterclockwise order. -C - 3 LP = LEND(N0) - LP = LPTR(LP) - N0 = LIST(LP) - IF (N0 .EQ. NST) GO TO 4 - K = K + 1 - NODES(K) = N0 - GO TO 3 -C -C Store the counts. -C - 4 NB = K - NT = 2*N - NB - 2 - NA = NT + N - 1 - RETURN - END SUBROUTINE - SUBROUTINE CIRCUM (V1,V2,V3, C,IER) - INTEGER IER - REAL V1(3), V2(3), V3(3), C(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/29/95 -C -C This subroutine returns the circumcenter of a spherical -C triangle on the unit sphere: the point on the sphere sur- -C face that is equally distant from the three triangle -C vertices and lies in the same hemisphere, where distance -C is taken to be arc-length on the sphere surface. -C -C -C On input: -C -C V1,V2,V3 = Arrays of length 3 containing the Carte- -C sian coordinates of the three triangle -C vertices (unit vectors) in CCW order. -C -C The above parameters are not altered by this routine. -C -C C = Array of length 3. -C -C On output: -C -C C = Cartesian coordinates of the circumcenter unless -C IER > 0, in which case C is not defined. C = -C (V2-V1) X (V3-V1) normalized to a unit vector. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if V1, V2, and V3 lie on a common -C line: (V2-V1) X (V3-V1) = 0. -C (The vertices are not tested for validity.) -C -C Modules required by CIRCUM: None -C -C Intrinsic function called by CIRCUM: SQRT -C -C*********************************************************** -C - INTEGER I - REAL CNORM, CU(3), E1(3), E2(3) -C -C Local parameters: -C -C CNORM = Norm of CU: used to compute C -C CU = Scalar multiple of C: E1 X E2 -C E1,E2 = Edges of the underlying planar triangle: -C V2-V1 and V3-V1, respectively -C I = DO-loop index -C - DO 1 I = 1,3 - E1(I) = V2(I) - V1(I) - E2(I) = V3(I) - V1(I) - 1 CONTINUE -C -C Compute CU = E1 X E2 and CNORM**2. -C - CU(1) = E1(2)*E2(3) - E1(3)*E2(2) - CU(2) = E1(3)*E2(1) - E1(1)*E2(3) - CU(3) = E1(1)*E2(2) - E1(2)*E2(1) - CNORM = CU(1)*CU(1) + CU(2)*CU(2) + CU(3)*CU(3) -C -C The vertices lie on a common line if and only if CU is -C the zero vector. -C - IF (CNORM .NE. 0.) THEN -C -C No error: compute C. 
-C - CNORM = SQRT(CNORM) - DO 2 I = 1,3 - C(I) = CU(I)/CNORM - 2 CONTINUE - IER = 0 - ELSE -C -C CU = 0. -C - IER = 1 - ENDIF - RETURN - END SUBROUTINE - SUBROUTINE COVSPH (KK,N0, LIST,LPTR,LEND,LNEW ) - INTEGER KK, N0, LIST(*), LPTR(*), LEND(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine connects an exterior node KK to all -C boundary nodes of a triangulation of KK-1 points on the -C unit sphere, producing a triangulation that covers the -C sphere. The data structure is updated with the addition -C of node KK, but no optimization is performed. All boun- -C dary nodes must be visible from node KK. -C -C -C On input: -C -C KK = Index of the node to be connected to the set of -C all boundary nodes. KK .GE. 4. -C -C N0 = Index of a boundary node (in the range 1 to -C KK-1). N0 may be determined by Subroutine -C TRFIND. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C Node N0 must be included in -C the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK as the -C last entry. The updated -C triangulation contains no -C boundary nodes. -C -C Module required by COVSPH: INSERT -C -C*********************************************************** -C - INTEGER K, LP, LSAV, NEXT, NST -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C LSAV = LIST pointer -C NEXT = Boundary node visible from K -C NST = Local copy of N0 -C - K = KK - NST = N0 -C -C Traverse the boundary in clockwise order, inserting K as -C the first neighbor of each boundary node, and converting -C the boundary node to an interior node. -C - NEXT = NST - 1 LP = LEND(NEXT) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - NEXT = -LIST(LP) - LIST(LP) = NEXT - IF (NEXT .NE. NST) GO TO 1 -C -C Traverse the boundary again, adding each node to K's -C adjacency list. -C - LSAV = LNEW - 2 LP = LEND(NEXT) - LIST(LNEW) = NEXT - LPTR(LNEW) = LNEW + 1 - LNEW = LNEW + 1 - NEXT = LIST(LP) - IF (NEXT .NE. NST) GO TO 2 -C - LPTR(LNEW-1) = LSAV - LEND(K) = LNEW - 1 - RETURN - END SUBROUTINE - SUBROUTINE CRLIST (N,NCOL,X,Y,Z,LIST,LEND, LPTR,LNEW, - . LTRI, LISTC,NB,XC,YC,ZC,RC,IER) - INTEGER N, NCOL, LIST(*), LEND(N), LPTR(*), LNEW, - . LTRI(6,NCOL), LISTC(*), NB, IER - REAL X(N), Y(N), Z(N), XC(*), YC(*), ZC(*), RC(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/05/98 -C -C Given a Delaunay triangulation of nodes on the surface -C of the unit sphere, this subroutine returns the set of -C triangle circumcenters corresponding to Voronoi vertices, -C along with the circumradii and a list of triangle indexes -C LISTC stored in one-to-one correspondence with LIST/LPTR -C entries. -C -C A triangle circumcenter is the point (unit vector) lying -C at the same angular distance from the three vertices and -C contained in the same hemisphere as the vertices. (Note -C that the negative of a circumcenter is also equidistant -C from the vertices.) If the triangulation covers the sur- -C face, the Voronoi vertices are the circumcenters of the -C triangles in the Delaunay triangulation. LPTR, LEND, and -C LNEW are not altered in this case. 
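The circumcenter formula that CIRCUM (above) relies on, the unit vector along (V2 - V1) x (V3 - V1) with the circumradius recovered as the angle between that center and any vertex, can be illustrated in a few lines of Python. This is only a sketch with hypothetical names; collinear input returns None in the spirit of CIRCUM's IER = 1, and unit-vector vertices are assumed.

import math

def spherical_circumcenter(v1, v2, v3):
    # Unit vector along (v2 - v1) x (v3 - v1); equidistant (in arc length)
    # from the three vertices.  Returns (center, radius) or None when the
    # vertices are collinear (zero cross product).
    e1 = [v2[i] - v1[i] for i in range(3)]
    e2 = [v3[i] - v1[i] for i in range(3)]
    cu = [e1[1]*e2[2] - e1[2]*e2[1],
          e1[2]*e2[0] - e1[0]*e2[2],
          e1[0]*e2[1] - e1[1]*e2[0]]
    norm = math.sqrt(sum(c*c for c in cu))
    if norm == 0.0:
        return None
    c = [ci / norm for ci in cu]
    # Circumradius as an arc length: the angle between the center and a vertex.
    r = math.acos(max(-1.0, min(1.0, sum(c[i]*v1[i] for i in range(3)))))
    return c, r

print(spherical_circumcenter((1, 0, 0), (0, 1, 0), (0, 0, 1)))
# ([0.577..., 0.577..., 0.577...], 0.955...)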
-C -C On the other hand, if the nodes are contained in a sin- -C gle hemisphere, the triangulation is implicitly extended -C to the entire surface by adding pseudo-arcs (of length -C greater than 180 degrees) between boundary nodes forming -C pseudo-triangles whose 'circumcenters' are included in the -C list. This extension to the triangulation actually con- -C sists of a triangulation of the set of boundary nodes in -C which the swap test is reversed (a non-empty circumcircle -C test). The negative circumcenters are stored as the -C pseudo-triangle 'circumcenters'. LISTC, LPTR, LEND, and -C LNEW contain a data structure corresponding to the ex- -C tended triangulation (Voronoi diagram), but LIST is not -C altered in this case. Thus, if it is necessary to retain -C the original (unextended) triangulation data structure, -C copies of LPTR and LNEW must be saved before calling this -C routine. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C Note that, if N = 3, there are only two Voronoi -C vertices separated by 180 degrees, and the -C Voronoi regions are not well defined. -C -C NCOL = Number of columns reserved for LTRI. This -C must be at least NB-2, where NB is the number -C of boundary nodes. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C LIST = Integer array containing the set of adjacency -C lists. Refer to Subroutine TRMESH. -C -C LEND = Set of pointers to ends of adjacency lists. -C Refer to Subroutine TRMESH. -C -C The above parameters are not altered by this routine. -C -C LPTR = Array of pointers associated with LIST. Re- -C fer to Subroutine TRMESH. -C -C LNEW = Pointer to the first empty location in LIST -C and LPTR (list length plus one). -C -C LTRI = Integer work space array dimensioned 6 by -C NCOL, or unused dummy parameter if NB = 0. -C -C LISTC = Integer array of length at least 3*NT, where -C NT = 2*N-4 is the number of triangles in the -C triangulation (after extending it to cover -C the entire surface if necessary). -C -C XC,YC,ZC,RC = Arrays of length NT = 2*N-4. -C -C On output: -C -C LPTR = Array of pointers associated with LISTC: -C updated for the addition of pseudo-triangles -C if the original triangulation contains -C boundary nodes (NB > 0). -C -C LNEW = Pointer to the first empty location in LISTC -C and LPTR (list length plus one). LNEW is not -C altered if NB = 0. -C -C LTRI = Triangle list whose first NB-2 columns con- -C tain the indexes of a clockwise-ordered -C sequence of vertices (first three rows) -C followed by the LTRI column indexes of the -C triangles opposite the vertices (or 0 -C denoting the exterior region) in the last -C three rows. This array is not generally of -C any use. -C -C LISTC = Array containing triangle indexes (indexes -C to XC, YC, ZC, and RC) stored in 1-1 corres- -C pondence with LIST/LPTR entries (or entries -C that would be stored in LIST for the -C extended triangulation): the index of tri- -C angle (N1,N2,N3) is stored in LISTC(K), -C LISTC(L), and LISTC(M), where LIST(K), -C LIST(L), and LIST(M) are the indexes of N2 -C as a neighbor of N1, N3 as a neighbor of N2, -C and N1 as a neighbor of N3. The Voronoi -C region associated with a node is defined by -C the CCW-ordered sequence of circumcenters in -C one-to-one correspondence with its adjacency -C list (in the extended triangulation). -C -C NB = Number of boundary nodes unless IER = 1. 
-C -C XC,YC,ZC = Arrays containing the Cartesian coordi- -C nates of the triangle circumcenters -C (Voronoi vertices). XC(I)**2 + YC(I)**2 -C + ZC(I)**2 = 1. The first NB-2 entries -C correspond to pseudo-triangles if NB > 0. -C -C RC = Array containing circumradii (the arc lengths -C or angles between the circumcenters and associ- -C ated triangle vertices) in 1-1 correspondence -C with circumcenters. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if N < 3. -C IER = 2 if NCOL < NB-2. -C IER = 3 if a triangle is degenerate (has ver- -C tices lying on a common geodesic). -C -C Modules required by CRLIST: CIRCUM, LSTPTR, SWPTST -C -C Intrinsic functions called by CRLIST: ABS, ACOS -C -C*********************************************************** -C - INTEGER I1, I2, I3, I4, IERR, KT, KT1, KT2, KT11, - . KT12, KT21, KT22, LP, LPL, LPN, N0, N1, N2, - . N3, N4, NM2, NN, NT - LOGICAL SWP - REAL C(3), T, V1(3), V2(3), V3(3) -C -C Local parameters: -C -C C = Circumcenter returned by Subroutine CIRCUM -C I1,I2,I3 = Permutation of (1,2,3): LTRI row indexes -C I4 = LTRI row index in the range 1 to 3 -C IERR = Error flag for calls to CIRCUM -C KT = Triangle index -C KT1,KT2 = Indexes of a pair of adjacent pseudo-triangles -C KT11,KT12 = Indexes of the pseudo-triangles opposite N1 -C and N2 as vertices of KT1 -C KT21,KT22 = Indexes of the pseudo-triangles opposite N1 -C and N2 as vertices of KT2 -C LP,LPN = LIST pointers -C LPL = LIST pointer of the last neighbor of N1 -C N0 = Index of the first boundary node (initial -C value of N1) in the loop on boundary nodes -C used to store the pseudo-triangle indexes -C in LISTC -C N1,N2,N3 = Nodal indexes defining a triangle (CCW order) -C or pseudo-triangle (clockwise order) -C N4 = Index of the node opposite N2 -> N1 -C NM2 = N-2 -C NN = Local copy of N -C NT = Number of pseudo-triangles: NB-2 -C SWP = Logical variable set to TRUE in each optimiza- -C tion loop (loop on pseudo-arcs) iff a swap -C is performed -C V1,V2,V3 = Vertices of triangle KT = (N1,N2,N3) sent to -C Subroutine CIRCUM -C - NN = N - NB = 0 - NT = 0 - IF (NN .LT. 3) GO TO 21 -C -C Search for a boundary node N1. -C - DO 1 N1 = 1,NN - LP = LEND(N1) - IF (LIST(LP) .LT. 0) GO TO 2 - 1 CONTINUE -C -C The triangulation already covers the sphere. -C - GO TO 9 -C -C There are NB .GE. 3 boundary nodes. Add NB-2 pseudo- -C triangles (N1,N2,N3) by connecting N3 to the NB-3 -C boundary nodes to which it is not already adjacent. -C -C Set N3 and N2 to the first and last neighbors, -C respectively, of N1. -C - 2 N2 = -LIST(LP) - LP = LPTR(LP) - N3 = LIST(LP) -C -C Loop on boundary arcs N1 -> N2 in clockwise order, -C storing triangles (N1,N2,N3) in column NT of LTRI -C along with the indexes of the triangles opposite -C the vertices. -C - 3 NT = NT + 1 - IF (NT .LE. NCOL) THEN - LTRI(1,NT) = N1 - LTRI(2,NT) = N2 - LTRI(3,NT) = N3 - LTRI(4,NT) = NT + 1 - LTRI(5,NT) = NT - 1 - LTRI(6,NT) = 0 - ENDIF - N1 = N2 - LP = LEND(N1) - N2 = -LIST(LP) - IF (N2 .NE. N3) GO TO 3 -C - NB = NT + 2 - IF (NCOL .LT. NT) GO TO 22 - LTRI(4,NT) = 0 - IF (NT .EQ. 1) GO TO 7 -C -C Optimize the exterior triangulation (set of pseudo- -C triangles) by applying swaps to the pseudo-arcs N1-N2 -C (pairs of adjacent pseudo-triangles KT1 and KT2 > KT1). -C The loop on pseudo-arcs is repeated until no swaps are -C performed. -C - 4 SWP = .FALSE. - DO 6 KT1 = 1,NT-1 - DO 5 I3 = 1,3 - KT2 = LTRI(I3+3,KT1) - IF (KT2 .LE. 
KT1) GO TO 5 -C -C The LTRI row indexes (I1,I2,I3) of triangle KT1 = -C (N1,N2,N3) are a cyclical permutation of (1,2,3). -C - IF (I3 .EQ. 1) THEN - I1 = 2 - I2 = 3 - ELSEIF (I3 .EQ. 2) THEN - I1 = 3 - I2 = 1 - ELSE - I1 = 1 - I2 = 2 - ENDIF - N1 = LTRI(I1,KT1) - N2 = LTRI(I2,KT1) - N3 = LTRI(I3,KT1) -C -C KT2 = (N2,N1,N4) for N4 = LTRI(I,KT2), where -C LTRI(I+3,KT2) = KT1. -C - IF (LTRI(4,KT2) .EQ. KT1) THEN - I4 = 1 - ELSEIF (LTRI(5,KT2) .EQ. KT1) THEN - I4 = 2 - ELSE - I4 = 3 - ENDIF - N4 = LTRI(I4,KT2) -C -C The empty circumcircle test is reversed for the pseudo- -C triangles. The reversal is implicit in the clockwise -C ordering of the vertices. -C - IF ( .NOT. SWPTST(N1,N2,N3,N4,X,Y,Z) ) GO TO 5 -C -C Swap arc N1-N2 for N3-N4. KTij is the triangle opposite -C Nj as a vertex of KTi. -C - SWP = .TRUE. - KT11 = LTRI(I1+3,KT1) - KT12 = LTRI(I2+3,KT1) - IF (I4 .EQ. 1) THEN - I2 = 2 - I1 = 3 - ELSEIF (I4 .EQ. 2) THEN - I2 = 3 - I1 = 1 - ELSE - I2 = 1 - I1 = 2 - ENDIF - KT21 = LTRI(I1+3,KT2) - KT22 = LTRI(I2+3,KT2) - LTRI(1,KT1) = N4 - LTRI(2,KT1) = N3 - LTRI(3,KT1) = N1 - LTRI(4,KT1) = KT12 - LTRI(5,KT1) = KT22 - LTRI(6,KT1) = KT2 - LTRI(1,KT2) = N3 - LTRI(2,KT2) = N4 - LTRI(3,KT2) = N2 - LTRI(4,KT2) = KT21 - LTRI(5,KT2) = KT11 - LTRI(6,KT2) = KT1 -C -C Correct the KT11 and KT22 entries that changed. -C - IF (KT11 .NE. 0) THEN - I4 = 4 - IF (LTRI(4,KT11) .NE. KT1) THEN - I4 = 5 - IF (LTRI(5,KT11) .NE. KT1) I4 = 6 - ENDIF - LTRI(I4,KT11) = KT2 - ENDIF - IF (KT22 .NE. 0) THEN - I4 = 4 - IF (LTRI(4,KT22) .NE. KT2) THEN - I4 = 5 - IF (LTRI(5,KT22) .NE. KT2) I4 = 6 - ENDIF - LTRI(I4,KT22) = KT1 - ENDIF - 5 CONTINUE - 6 CONTINUE - IF (SWP) GO TO 4 -C -C Compute and store the negative circumcenters and radii of -C the pseudo-triangles in the first NT positions. -C - 7 DO 8 KT = 1,NT - N1 = LTRI(1,KT) - N2 = LTRI(2,KT) - N3 = LTRI(3,KT) - V1(1) = X(N1) - V1(2) = Y(N1) - V1(3) = Z(N1) - V2(1) = X(N2) - V2(2) = Y(N2) - V2(3) = Z(N2) - V3(1) = X(N3) - V3(2) = Y(N3) - V3(3) = Z(N3) - CALL CIRCUM (V1,V2,V3, C,IERR) - IF (IERR .NE. 0) GO TO 23 -C -C Store the negative circumcenter and radius (computed -C from ). -C - XC(KT) = C(1) - YC(KT) = C(2) - ZC(KT) = C(3) - T = V1(1)*C(1) + V1(2)*C(2) + V1(3)*C(3) - IF (T .LT. -1.0) T = -1.0 - IF (T .GT. 1.0) T = 1.0 - RC(KT) = ACOS(T) - 8 CONTINUE -C -C Compute and store the circumcenters and radii of the -C actual triangles in positions KT = NT+1, NT+2, ... -C Also, store the triangle indexes KT in the appropriate -C LISTC positions. -C - 9 KT = NT -C -C Loop on nodes N1. -C - NM2 = NN - 2 - DO 12 N1 = 1,NM2 - LPL = LEND(N1) - LP = LPL - N3 = LIST(LP) -C -C Loop on adjacent neighbors N2,N3 of N1 for which N2 > N1 -C and N3 > N1. -C - 10 LP = LPTR(LP) - N2 = N3 - N3 = ABS(LIST(LP)) - IF (N2 .LE. N1 .OR. N3 .LE. N1) GO TO 11 - KT = KT + 1 -C -C Compute the circumcenter C of triangle KT = (N1,N2,N3). -C - V1(1) = X(N1) - V1(2) = Y(N1) - V1(3) = Z(N1) - V2(1) = X(N2) - V2(2) = Y(N2) - V2(3) = Z(N2) - V3(1) = X(N3) - V3(2) = Y(N3) - V3(3) = Z(N3) - CALL CIRCUM (V1,V2,V3, C,IERR) - IF (IERR .NE. 0) GO TO 23 -C -C Store the circumcenter, radius and triangle index. -C - XC(KT) = C(1) - YC(KT) = C(2) - ZC(KT) = C(3) - T = V1(1)*C(1) + V1(2)*C(2) + V1(3)*C(3) - IF (T .LT. -1.0) T = -1.0 - IF (T .GT. 1.0) T = 1.0 - RC(KT) = ACOS(T) -C -C Store KT in LISTC(LPN), where Abs(LIST(LPN)) is the -C index of N2 as a neighbor of N1, N3 as a neighbor -C of N2, and N1 as a neighbor of N3. 
-C - LPN = LSTPTR(LPL,N2,LIST,LPTR) - LISTC(LPN) = KT - LPN = LSTPTR(LEND(N2),N3,LIST,LPTR) - LISTC(LPN) = KT - LPN = LSTPTR(LEND(N3),N1,LIST,LPTR) - LISTC(LPN) = KT - 11 IF (LP .NE. LPL) GO TO 10 - 12 CONTINUE - IF (NT .EQ. 0) GO TO 20 -C -C Store the first NT triangle indexes in LISTC. -C -C Find a boundary triangle KT1 = (N1,N2,N3) with a -C boundary arc opposite N3. -C - KT1 = 0 - 13 KT1 = KT1 + 1 - IF (LTRI(4,KT1) .EQ. 0) THEN - I1 = 2 - I2 = 3 - I3 = 1 - GO TO 14 - ELSEIF (LTRI(5,KT1) .EQ. 0) THEN - I1 = 3 - I2 = 1 - I3 = 2 - GO TO 14 - ELSEIF (LTRI(6,KT1) .EQ. 0) THEN - I1 = 1 - I2 = 2 - I3 = 3 - GO TO 14 - ENDIF - GO TO 13 - 14 N1 = LTRI(I1,KT1) - N0 = N1 -C -C Loop on boundary nodes N1 in CCW order, storing the -C indexes of the clockwise-ordered sequence of triangles -C that contain N1. The first triangle overwrites the -C last neighbor position, and the remaining triangles, -C if any, are appended to N1's adjacency list. -C -C A pointer to the first neighbor of N1 is saved in LPN. -C - 15 LP = LEND(N1) - LPN = LPTR(LP) - LISTC(LP) = KT1 -C -C Loop on triangles KT2 containing N1. -C - 16 KT2 = LTRI(I2+3,KT1) - IF (KT2 .NE. 0) THEN -C -C Append KT2 to N1's triangle list. -C - LPTR(LP) = LNEW - LP = LNEW - LISTC(LP) = KT2 - LNEW = LNEW + 1 -C -C Set KT1 to KT2 and update (I1,I2,I3) such that -C LTRI(I1,KT1) = N1. -C - KT1 = KT2 - IF (LTRI(1,KT1) .EQ. N1) THEN - I1 = 1 - I2 = 2 - I3 = 3 - ELSEIF (LTRI(2,KT1) .EQ. N1) THEN - I1 = 2 - I2 = 3 - I3 = 1 - ELSE - I1 = 3 - I2 = 1 - I3 = 2 - ENDIF - GO TO 16 - ENDIF -C -C Store the saved first-triangle pointer in LPTR(LP), set -C N1 to the next boundary node, test for termination, -C and permute the indexes: the last triangle containing -C a boundary node is the first triangle containing the -C next boundary node. -C - LPTR(LP) = LPN - N1 = LTRI(I3,KT1) - IF (N1 .NE. N0) THEN - I4 = I3 - I3 = I2 - I2 = I1 - I1 = I4 - GO TO 15 - ENDIF -C -C No errors encountered. -C - 20 IER = 0 - RETURN -C -C N < 3. -C - 21 IER = 1 - RETURN -C -C Insufficient space reserved for LTRI. -C - 22 IER = 2 - RETURN -C -C Error flag returned by CIRCUM: KT indexes a null triangle. -C - 23 IER = 3 - RETURN - END SUBROUTINE - SUBROUTINE DELARC (N,IO1,IO2, LIST,LPTR,LEND, - . LNEW, IER) - INTEGER N, IO1, IO2, LIST(*), LPTR(*), LEND(N), LNEW, - . IER -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine deletes a boundary arc from a triangula- -C tion. It may be used to remove a null triangle from the -C convex hull boundary. Note, however, that if the union of -C triangles is rendered nonconvex, Subroutines DELNOD, EDGE, -C and TRFIND (and hence ADDNOD) may fail. Also, Function -C NEARND should not be called following an arc deletion. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 4. -C -C IO1,IO2 = Indexes (in the range 1 to N) of a pair of -C adjacent boundary nodes defining the arc -C to be removed. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Triangulation data structure -C created by Subroutine TRMESH. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the removal of arc IO1-IO2 -C unless IER > 0. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if N, IO1, or IO2 is outside its valid -C range, or IO1 = IO2. 
-C IER = 2 if IO1-IO2 is not a boundary arc. -C IER = 3 if the node opposite IO1-IO2 is al- -C ready a boundary node, and thus IO1 -C or IO2 has only two neighbors or a -C deletion would result in two triangu- -C lations sharing a single node. -C IER = 4 if one of the nodes is a neighbor of -C the other, but not vice versa, imply- -C ing an invalid triangulation data -C structure. -C -C Module required by DELARC: DELNB, LSTPTR -C -C Intrinsic function called by DELARC: ABS -C -C*********************************************************** -C - INTEGER LP, LPH, LPL, N1, N2, N3 -C -C Local parameters: -C -C LP = LIST pointer -C LPH = LIST pointer or flag returned by DELNB -C LPL = Pointer to the last neighbor of N1, N2, or N3 -C N1,N2,N3 = Nodal indexes of a triangle such that N1->N2 -C is the directed boundary edge associated -C with IO1-IO2 -C - N1 = IO1 - N2 = IO2 -C -C Test for errors, and set N1->N2 to the directed boundary -C edge associated with IO1-IO2: (N1,N2,N3) is a triangle -C for some N3. -C - IF (N .LT. 4 .OR. N1 .LT. 1 .OR. N1 .GT. N .OR. - . N2 .LT. 1 .OR. N2 .GT. N .OR. N1 .EQ. N2) THEN - IER = 1 - RETURN - ENDIF -C - LPL = LEND(N2) - IF (-LIST(LPL) .NE. N1) THEN - N1 = N2 - N2 = IO1 - LPL = LEND(N2) - IF (-LIST(LPL) .NE. N1) THEN - IER = 2 - RETURN - ENDIF - ENDIF -C -C Set N3 to the node opposite N1->N2 (the second neighbor -C of N1), and test for error 3 (N3 already a boundary -C node). -C - LPL = LEND(N1) - LP = LPTR(LPL) - LP = LPTR(LP) - N3 = ABS(LIST(LP)) - LPL = LEND(N3) - IF (LIST(LPL) .LE. 0) THEN - IER = 3 - RETURN - ENDIF -C -C Delete N2 as a neighbor of N1, making N3 the first -C neighbor, and test for error 4 (N2 not a neighbor -C of N1). Note that previously computed pointers may -C no longer be valid following the call to DELNB. -C - CALL DELNB (N1,N2,N, LIST,LPTR,LEND,LNEW, LPH) - IF (LPH .LT. 0) THEN - IER = 4 - RETURN - ENDIF -C -C Delete N1 as a neighbor of N2, making N3 the new last -C neighbor. -C - CALL DELNB (N2,N1,N, LIST,LPTR,LEND,LNEW, LPH) -C -C Make N3 a boundary node with first neighbor N2 and last -C neighbor N1. -C - LP = LSTPTR(LEND(N3),N1,LIST,LPTR) - LEND(N3) = LP - LIST(LP) = -N1 -C -C No errors encountered. -C - IER = 0 - RETURN - END SUBROUTINE - SUBROUTINE DELNB (N0,NB,N, LIST,LPTR,LEND,LNEW, LPH) - INTEGER N0, NB, N, LIST(*), LPTR(*), LEND(N), LNEW, - . LPH -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/29/98 -C -C This subroutine deletes a neighbor NB from the adjacency -C list of node N0 (but N0 is not deleted from the adjacency -C list of NB) and, if NB is a boundary node, makes N0 a -C boundary node. For pointer (LIST index) LPH to NB as a -C neighbor of N0, the empty LIST,LPTR location LPH is filled -C in with the values at LNEW-1, pointer LNEW-1 (in LPTR and -C possibly in LEND) is changed to LPH, and LNEW is decremen- -C ted. This requires a search of LEND and LPTR entailing an -C expected operation count of O(N). -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C N0,NB = Indexes, in the range 1 to N, of a pair of -C nodes such that NB is a neighbor of N0. -C (N0 need not be a neighbor of NB.) -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. 
-C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the removal of NB from the ad- -C jacency list of N0 unless -C LPH < 0. -C -C LPH = List pointer to the hole (NB as a neighbor of -C N0) filled in by the values at LNEW-1 or error -C indicator: -C LPH > 0 if no errors were encountered. -C LPH = -1 if N0, NB, or N is outside its valid -C range. -C LPH = -2 if NB is not a neighbor of N0. -C -C Modules required by DELNB: None -C -C Intrinsic function called by DELNB: ABS -C -C*********************************************************** -C - INTEGER I, LNW, LP, LPB, LPL, LPP, NN -C -C Local parameters: -C -C I = DO-loop index -C LNW = LNEW-1 (output value of LNEW) -C LP = LIST pointer of the last neighbor of NB -C LPB = Pointer to NB as a neighbor of N0 -C LPL = Pointer to the last neighbor of N0 -C LPP = Pointer to the neighbor of N0 that precedes NB -C NN = Local copy of N -C - NN = N -C -C Test for error 1. -C - IF (N0 .LT. 1 .OR. N0 .GT. NN .OR. NB .LT. 1 .OR. - . NB .GT. NN .OR. NN .LT. 3) THEN - LPH = -1 - RETURN - ENDIF -C -C Find pointers to neighbors of N0: -C -C LPL points to the last neighbor, -C LPP points to the neighbor NP preceding NB, and -C LPB points to NB. -C - LPL = LEND(N0) - LPP = LPL - LPB = LPTR(LPP) - 1 IF (LIST(LPB) .EQ. NB) GO TO 2 - LPP = LPB - LPB = LPTR(LPP) - IF (LPB .NE. LPL) GO TO 1 -C -C Test for error 2 (NB not found). -C - IF (ABS(LIST(LPB)) .NE. NB) THEN - LPH = -2 - RETURN - ENDIF -C -C NB is the last neighbor of N0. Make NP the new last -C neighbor and, if NB is a boundary node, then make N0 -C a boundary node. -C - LEND(N0) = LPP - LP = LEND(NB) - IF (LIST(LP) .LT. 0) LIST(LPP) = -LIST(LPP) - GO TO 3 -C -C NB is not the last neighbor of N0. If NB is a boundary -C node and N0 is not, then make N0 a boundary node with -C last neighbor NP. -C - 2 LP = LEND(NB) - IF (LIST(LP) .LT. 0 .AND. LIST(LPL) .GT. 0) THEN - LEND(N0) = LPP - LIST(LPP) = -LIST(LPP) - ENDIF -C -C Update LPTR so that the neighbor following NB now fol- -C lows NP, and fill in the hole at location LPB. -C - 3 LPTR(LPP) = LPTR(LPB) - LNW = LNEW-1 - LIST(LPB) = LIST(LNW) - LPTR(LPB) = LPTR(LNW) - DO 4 I = NN,1,-1 - IF (LEND(I) .EQ. LNW) THEN - LEND(I) = LPB - GO TO 5 - ENDIF - 4 CONTINUE -C - 5 DO 6 I = 1,LNW-1 - IF (LPTR(I) .EQ. LNW) THEN - LPTR(I) = LPB - ENDIF - 6 CONTINUE -C -C No errors encountered. -C - LNEW = LNW - LPH = LPB - RETURN - END SUBROUTINE - SUBROUTINE DELNOD (K, N,X,Y,Z,LIST,LPTR,LEND,LNEW,LWK, - . IWK, IER) - INTEGER K, N, LIST(*), LPTR(*), LEND(*), LNEW, LWK, - . IWK(2,*), IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 11/30/99 -C -C This subroutine deletes node K (along with all arcs -C incident on node K) from a triangulation of N nodes on the -C unit sphere, and inserts arcs as necessary to produce a -C triangulation of the remaining N-1 nodes. If a Delaunay -C triangulation is input, a Delaunay triangulation will -C result, and thus, DELNOD reverses the effect of a call to -C Subroutine ADDNOD. -C -C -C On input: -C -C K = Index (for X, Y, and Z) of the node to be -C deleted. 1 .LE. K .LE. N. -C -C K is not altered by this routine. -C -C N = Number of nodes in the triangulation on input. -C N .GE. 4. Note that N will be decremented -C following the deletion. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes in the triangula- -C tion. 
-C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. Refer to Sub- -C routine TRMESH. -C -C LWK = Number of columns reserved for IWK. LWK must -C be at least NNB-3, where NNB is the number of -C neighbors of node K, including an extra -C pseudo-node if K is a boundary node. -C -C IWK = Integer work array dimensioned 2 by LWK (or -C array of length .GE. 2*LWK). -C -C On output: -C -C N = Number of nodes in the triangulation on output. -C The input value is decremented unless 1 .LE. IER -C .LE. 4. -C -C X,Y,Z = Updated arrays containing nodal coordinates -C (with elements K+1,...,N+1 shifted up one -C position, thus overwriting element K) unless -C 1 .LE. IER .LE. 4. -C -C LIST,LPTR,LEND,LNEW = Updated triangulation data -C structure reflecting the dele- -C tion unless 1 .LE. IER .LE. 4. -C Note that the data structure -C may have been altered if IER > -C 3. -C -C LWK = Number of IWK columns required unless IER = 1 -C or IER = 3. -C -C IWK = Indexes of the endpoints of the new arcs added -C unless LWK = 0 or 1 .LE. IER .LE. 4. (Arcs -C are associated with columns, or pairs of -C adjacent elements if IWK is declared as a -C singly-subscripted array.) -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if K or N is outside its valid range -C or LWK < 0 on input. -C IER = 2 if more space is required in IWK. -C Refer to LWK. -C IER = 3 if the triangulation data structure is -C invalid on input. -C IER = 4 if K indexes an interior node with -C four or more neighbors, none of which -C can be swapped out due to collineari- -C ty, and K cannot therefore be deleted. -C IER = 5 if an error flag (other than IER = 1) -C was returned by OPTIM. An error -C message is written to the standard -C output unit in this case. -C IER = 6 if error flag 1 was returned by OPTIM. -C This is not necessarily an error, but -C the arcs may not be optimal. -C -C Note that the deletion may result in all remaining nodes -C being collinear. This situation is not flagged. -C -C Modules required by DELNOD: DELNB, LEFT, LSTPTR, NBCNT, -C OPTIM, SWAP, SWPTST -C -C Intrinsic function called by DELNOD: ABS -C -C*********************************************************** -C - INTEGER I, IERR, IWL, J, LNW, LP, LP21, LPF, LPH, LPL, - . LPL2, LPN, LWKL, N1, N2, NFRST, NIT, NL, NN, - . 
NNB, NR - LOGICAL BDRY - REAL X1, X2, XL, XR, Y1, Y2, YL, YR, Z1, Z2, ZL, ZR -C -C Local parameters: -C -C BDRY = Logical variable with value TRUE iff N1 is a -C boundary node -C I,J = DO-loop indexes -C IERR = Error flag returned by OPTIM -C IWL = Number of IWK columns containing arcs -C LNW = Local copy of LNEW -C LP = LIST pointer -C LP21 = LIST pointer returned by SWAP -C LPF,LPL = Pointers to the first and last neighbors of N1 -C LPH = Pointer (or flag) returned by DELNB -C LPL2 = Pointer to the last neighbor of N2 -C LPN = Pointer to a neighbor of N1 -C LWKL = Input value of LWK -C N1 = Local copy of K -C N2 = Neighbor of N1 -C NFRST = First neighbor of N1: LIST(LPF) -C NIT = Number of iterations in OPTIM -C NR,NL = Neighbors of N1 preceding (to the right of) and -C following (to the left of) N2, respectively -C NN = Number of nodes in the triangulation -C NNB = Number of neighbors of N1 (including a pseudo- -C node representing the boundary if N1 is a -C boundary node) -C X1,Y1,Z1 = Coordinates of N1 -C X2,Y2,Z2 = Coordinates of N2 -C XL,YL,ZL = Coordinates of NL -C XR,YR,ZR = Coordinates of NR -C -C -C Set N1 to K and NNB to the number of neighbors of N1 (plus -C one if N1 is a boundary node), and test for errors. LPF -C and LPL are LIST indexes of the first and last neighbors -C of N1, IWL is the number of IWK columns containing arcs, -C and BDRY is TRUE iff N1 is a boundary node. -C - N1 = K - NN = N - IF (N1 .LT. 1 .OR. N1 .GT. NN .OR. NN .LT. 4 .OR. - . LWK .LT. 0) GO TO 21 - LPL = LEND(N1) - LPF = LPTR(LPL) - NNB = NBCNT(LPL,LPTR) - BDRY = LIST(LPL) .LT. 0 - IF (BDRY) NNB = NNB + 1 - IF (NNB .LT. 3) GO TO 23 - LWKL = LWK - LWK = NNB - 3 - IF (LWKL .LT. LWK) GO TO 22 - IWL = 0 - IF (NNB .EQ. 3) GO TO 3 -C -C Initialize for loop on arcs N1-N2 for neighbors N2 of N1, -C beginning with the second neighbor. NR and NL are the -C neighbors preceding and following N2, respectively, and -C LP indexes NL. The loop is exited when all possible -C swaps have been applied to arcs incident on N1. -C - X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - NFRST = LIST(LPF) - NR = NFRST - XR = X(NR) - YR = Y(NR) - ZR = Z(NR) - LP = LPTR(LPF) - N2 = LIST(LP) - X2 = X(N2) - Y2 = Y(N2) - Z2 = Z(N2) - LP = LPTR(LP) -C -C Top of loop: set NL to the neighbor following N2. -C - 1 NL = ABS(LIST(LP)) - IF (NL .EQ. NFRST .AND. BDRY) GO TO 3 - XL = X(NL) - YL = Y(NL) - ZL = Z(NL) -C -C Test for a convex quadrilateral. To avoid an incorrect -C test caused by collinearity, use the fact that if N1 -C is a boundary node, then N1 LEFT NR->NL and if N2 is -C a boundary node, then N2 LEFT NL->NR. -C - LPL2 = LEND(N2) - IF ( .NOT. ((BDRY .OR. LEFT(XR,YR,ZR,XL,YL,ZL,X1,Y1, - . Z1)) .AND. (LIST(LPL2) .LT. 0 .OR. - . LEFT(XL,YL,ZL,XR,YR,ZR,X2,Y2,Z2))) ) THEN -C -C Nonconvex quadrilateral -- no swap is possible. -C - NR = N2 - XR = X2 - YR = Y2 - ZR = Z2 - GO TO 2 - ENDIF -C -C The quadrilateral defined by adjacent triangles -C (N1,N2,NL) and (N2,N1,NR) is convex. Swap in -C NL-NR and store it in IWK unless NL and NR are -C already adjacent, in which case the swap is not -C possible. Indexes larger than N1 must be decremented -C since N1 will be deleted from X, Y, and Z. -C - CALL SWAP (NL,NR,N1,N2, LIST,LPTR,LEND, LP21) - IF (LP21 .EQ. 0) THEN - NR = N2 - XR = X2 - YR = Y2 - ZR = Z2 - GO TO 2 - ENDIF - IWL = IWL + 1 - IF (NL .LE. N1) THEN - IWK(1,IWL) = NL - ELSE - IWK(1,IWL) = NL - 1 - ENDIF - IF (NR .LE. 
N1) THEN - IWK(2,IWL) = NR - ELSE - IWK(2,IWL) = NR - 1 - ENDIF -C -C Recompute the LIST indexes and NFRST, and decrement NNB. -C - LPL = LEND(N1) - NNB = NNB - 1 - IF (NNB .EQ. 3) GO TO 3 - LPF = LPTR(LPL) - NFRST = LIST(LPF) - LP = LSTPTR(LPL,NL,LIST,LPTR) - IF (NR .EQ. NFRST) GO TO 2 -C -C NR is not the first neighbor of N1. -C Back up and test N1-NR for a swap again: Set N2 to -C NR and NR to the previous neighbor of N1 -- the -C neighbor of NR which follows N1. LP21 points to NL -C as a neighbor of NR. -C - N2 = NR - X2 = XR - Y2 = YR - Z2 = ZR - LP21 = LPTR(LP21) - LP21 = LPTR(LP21) - NR = ABS(LIST(LP21)) - XR = X(NR) - YR = Y(NR) - ZR = Z(NR) - GO TO 1 -C -C Bottom of loop -- test for termination of loop. -C - 2 IF (N2 .EQ. NFRST) GO TO 3 - N2 = NL - X2 = XL - Y2 = YL - Z2 = ZL - LP = LPTR(LP) - GO TO 1 -C -C Delete N1 and all its incident arcs. If N1 is an interior -C node and either NNB > 3 or NNB = 3 and N2 LEFT NR->NL, -C then N1 must be separated from its neighbors by a plane -C containing the origin -- its removal reverses the effect -C of a call to COVSPH, and all its neighbors become -C boundary nodes. This is achieved by treating it as if -C it were a boundary node (setting BDRY to TRUE, changing -C a sign in LIST, and incrementing NNB). -C - 3 IF (.NOT. BDRY) THEN - IF (NNB .GT. 3) THEN - BDRY = .TRUE. - ELSE - LPF = LPTR(LPL) - NR = LIST(LPF) - LP = LPTR(LPF) - N2 = LIST(LP) - NL = LIST(LPL) - BDRY = LEFT(X(NR),Y(NR),Z(NR),X(NL),Y(NL),Z(NL), - . X(N2),Y(N2),Z(N2)) - ENDIF - IF (BDRY) THEN -C -C IF a boundary node already exists, then N1 and its -C neighbors cannot be converted to boundary nodes. -C (They must be collinear.) This is a problem if -C NNB > 3. -C - DO 4 I = 1,NN - IF (LIST(LEND(I)) .LT. 0) THEN - BDRY = .FALSE. - GO TO 5 - ENDIF - 4 CONTINUE - LIST(LPL) = -LIST(LPL) - NNB = NNB + 1 - ENDIF - ENDIF - 5 IF (.NOT. BDRY .AND. NNB .GT. 3) GO TO 24 -C -C Initialize for loop on neighbors. LPL points to the last -C neighbor of N1. LNEW is stored in local variable LNW. -C - LP = LPL - LNW = LNEW -C -C Loop on neighbors N2 of N1, beginning with the first. -C - 6 LP = LPTR(LP) - N2 = ABS(LIST(LP)) - CALL DELNB (N2,N1,N, LIST,LPTR,LEND,LNW, LPH) - IF (LPH .LT. 0) GO TO 23 -C -C LP and LPL may require alteration. -C - IF (LPL .EQ. LNW) LPL = LPH - IF (LP .EQ. LNW) LP = LPH - IF (LP .NE. LPL) GO TO 6 -C -C Delete N1 from X, Y, Z, and LEND, and remove its adjacency -C list from LIST and LPTR. LIST entries (nodal indexes) -C which are larger than N1 must be decremented. -C - NN = NN - 1 - IF (N1 .GT. NN) GO TO 9 - DO 7 I = N1,NN - X(I) = X(I+1) - Y(I) = Y(I+1) - Z(I) = Z(I+1) - LEND(I) = LEND(I+1) - 7 CONTINUE -C - DO 8 I = 1,LNW-1 - IF (LIST(I) .GT. N1) LIST(I) = LIST(I) - 1 - IF (LIST(I) .LT. -N1) LIST(I) = LIST(I) + 1 - 8 CONTINUE -C -C For LPN = first to last neighbors of N1, delete the -C preceding neighbor (indexed by LP). -C -C Each empty LIST,LPTR location LP is filled in with the -C values at LNW-1, and LNW is decremented. All pointers -C (including those in LPTR and LEND) with value LNW-1 -C must be changed to LP. -C -C LPL points to the last neighbor of N1. -C - 9 IF (BDRY) NNB = NNB - 1 - LPN = LPL - DO 13 J = 1,NNB - LNW = LNW - 1 - LP = LPN - LPN = LPTR(LP) - LIST(LP) = LIST(LNW) - LPTR(LP) = LPTR(LNW) - IF (LPTR(LPN) .EQ. LNW) LPTR(LPN) = LP - IF (LPN .EQ. LNW) LPN = LP - DO 10 I = NN,1,-1 - IF (LEND(I) .EQ. LNW) THEN - LEND(I) = LP - GO TO 11 - ENDIF - 10 CONTINUE -C - 11 DO 12 I = LNW-1,1,-1 - IF (LPTR(I) .EQ. 
LNW) LPTR(I) = LP - 12 CONTINUE - 13 CONTINUE -C -C Update N and LNEW, and optimize the patch of triangles -C containing K (on input) by applying swaps to the arcs -C in IWK. -C - N = NN - LNEW = LNW - IF (IWL .GT. 0) THEN - NIT = 4*IWL - CALL OPTIM (X,Y,Z,IWL, LIST,LPTR,LEND,NIT,IWK, IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 25 - IF (IERR .EQ. 1) GO TO 26 - ENDIF -C -C Successful termination. -C - IER = 0 - RETURN -C -C Invalid input parameter. -C - 21 IER = 1 - RETURN -C -C Insufficient space reserved for IWK. -C - 22 IER = 2 - RETURN -C -C Invalid triangulation data structure. NNB < 3 on input or -C N2 is a neighbor of N1 but N1 is not a neighbor of N2. -C - 23 IER = 3 - RETURN -C -C N1 is interior but NNB could not be reduced to 3. -C - 24 IER = 4 - RETURN -C -C Error flag (other than 1) returned by OPTIM. -C - 25 IER = 5 - WRITE (*,100) NIT, IERR - 100 FORMAT (//5X,'*** Error in OPTIM (called from ', - . 'DELNOD): NIT = ',I4,', IER = ',I1,' ***'/) - RETURN -C -C Error flag 1 returned by OPTIM. -C - 26 IER = 6 - RETURN - END SUBROUTINE - SUBROUTINE EDGE (IN1,IN2,X,Y,Z, LWK,IWK,LIST,LPTR, - . LEND, IER) - INTEGER IN1, IN2, LWK, IWK(2,*), LIST(*), LPTR(*), - . LEND(*), IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/30/98 -C -C Given a triangulation of N nodes and a pair of nodal -C indexes IN1 and IN2, this routine swaps arcs as necessary -C to force IN1 and IN2 to be adjacent. Only arcs which -C intersect IN1-IN2 are swapped out. If a Delaunay triangu- -C lation is input, the resulting triangulation is as close -C as possible to a Delaunay triangulation in the sense that -C all arcs other than IN1-IN2 are locally optimal. -C -C A sequence of calls to EDGE may be used to force the -C presence of a set of edges defining the boundary of a non- -C convex and/or multiply connected region, or to introduce -C barriers into the triangulation. Note that Subroutine -C GETNP will not necessarily return closest nodes if the -C triangulation has been constrained by a call to EDGE. -C However, this is appropriate in some applications, such -C as triangle-based interpolation on a nonconvex domain. -C -C -C On input: -C -C IN1,IN2 = Indexes (of X, Y, and Z) in the range 1 to -C N defining a pair of nodes to be connected -C by an arc. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C The above parameters are not altered by this routine. -C -C LWK = Number of columns reserved for IWK. This must -C be at least NI -- the number of arcs that -C intersect IN1-IN2. (NI is bounded by N-3.) -C -C IWK = Integer work array of length at least 2*LWK. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C On output: -C -C LWK = Number of arcs which intersect IN1-IN2 (but -C not more than the input value of LWK) unless -C IER = 1 or IER = 3. LWK = 0 if and only if -C IN1 and IN2 were adjacent (or LWK=0) on input. -C -C IWK = Array containing the indexes of the endpoints -C of the new arcs other than IN1-IN2 unless -C IER > 0 or LWK = 0. New arcs to the left of -C IN1->IN2 are stored in the first K-1 columns -C (left portion of IWK), column K contains -C zeros, and new arcs to the right of IN1->IN2 -C occupy columns K+1,...,LWK. (K can be deter- -C mined by searching IWK for the zeros.) 
-C -C LIST,LPTR,LEND = Data structure updated if necessary -C to reflect the presence of an arc -C connecting IN1 and IN2 unless IER > -C 0. The data structure has been -C altered if IER >= 4. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if IN1 < 1, IN2 < 1, IN1 = IN2, -C or LWK < 0 on input. -C IER = 2 if more space is required in IWK. -C Refer to LWK. -C IER = 3 if IN1 and IN2 could not be connected -C due to either an invalid data struc- -C ture or collinear nodes (and floating -C point error). -C IER = 4 if an error flag other than IER = 1 -C was returned by OPTIM. -C IER = 5 if error flag 1 was returned by OPTIM. -C This is not necessarily an error, but -C the arcs other than IN1-IN2 may not -C be optimal. -C -C An error message is written to the standard output unit -C in the case of IER = 3 or IER = 4. -C -C Modules required by EDGE: LEFT, LSTPTR, OPTIM, SWAP, -C SWPTST -C -C Intrinsic function called by EDGE: ABS -C -C*********************************************************** -C - INTEGER I, IERR, IWC, IWCP1, IWEND, IWF, IWL, LFT, LP, - . LP21, LPL, N0, N1, N1FRST, N1LST, N2, NEXT, - . NIT, NL, NR - REAL DP12, DP1L, DP1R, DP2L, DP2R, X0, X1, X2, Y0, - . Y1, Y2, Z0, Z1, Z2 -C -C Local parameters: -C -C DPij = Dot product -C I = DO-loop index and column index for IWK -C IERR = Error flag returned by Subroutine OPTIM -C IWC = IWK index between IWF and IWL -- NL->NR is -C stored in IWK(1,IWC)->IWK(2,IWC) -C IWCP1 = IWC + 1 -C IWEND = Input or output value of LWK -C IWF = IWK (column) index of the first (leftmost) arc -C which intersects IN1->IN2 -C IWL = IWK (column) index of the last (rightmost) are -C which intersects IN1->IN2 -C LFT = Flag used to determine if a swap results in the -C new arc intersecting IN1-IN2 -- LFT = 0 iff -C N0 = IN1, LFT = -1 implies N0 LEFT IN1->IN2, -C and LFT = 1 implies N0 LEFT IN2->IN1 -C LP = List pointer (index for LIST and LPTR) -C LP21 = Unused parameter returned by SWAP -C LPL = Pointer to the last neighbor of IN1 or NL -C N0 = Neighbor of N1 or node opposite NR->NL -C N1,N2 = Local copies of IN1 and IN2 -C N1FRST = First neighbor of IN1 -C N1LST = (Signed) last neighbor of IN1 -C NEXT = Node opposite NL->NR -C NIT = Flag or number of iterations employed by OPTIM -C NL,NR = Endpoints of an arc which intersects IN1-IN2 -C with NL LEFT IN1->IN2 -C X0,Y0,Z0 = Coordinates of N0 -C X1,Y1,Z1 = Coordinates of IN1 -C X2,Y2,Z2 = Coordinates of IN2 -C -C -C Store IN1, IN2, and LWK in local variables and test for -C errors. -C - N1 = IN1 - N2 = IN2 - IWEND = LWK - IF (N1 .LT. 1 .OR. N2 .LT. 1 .OR. N1 .EQ. N2 .OR. - . IWEND .LT. 0) GO TO 31 -C -C Test for N2 as a neighbor of N1. LPL points to the last -C neighbor of N1. -C - LPL = LEND(N1) - N0 = ABS(LIST(LPL)) - LP = LPL - 1 IF (N0 .EQ. N2) GO TO 30 - LP = LPTR(LP) - N0 = LIST(LP) - IF (LP .NE. LPL) GO TO 1 -C -C Initialize parameters. -C - IWL = 0 - NIT = 0 -C -C Store the coordinates of N1 and N2. -C - 2 X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - X2 = X(N2) - Y2 = Y(N2) - Z2 = Z(N2) -C -C Set NR and NL to adjacent neighbors of N1 such that -C NR LEFT N2->N1 and NL LEFT N1->N2, -C (NR Forward N1->N2 or NL Forward N1->N2), and -C (NR Forward N2->N1 or NL Forward N2->N1). -C -C Initialization: Set N1FRST and N1LST to the first and -C (signed) last neighbors of N1, respectively, and -C initialize NL to N1FRST. -C - LPL = LEND(N1) - N1LST = LIST(LPL) - LP = LPTR(LPL) - N1FRST = LIST(LP) - NL = N1FRST - IF (N1LST .LT. 0) GO TO 4 -C -C N1 is an interior node. 
Set NL to the first candidate -C for NR (NL LEFT N2->N1). -C - 3 IF (LEFT(X2,Y2,Z2,X1,Y1,Z1,X(NL),Y(NL),Z(NL))) GO TO 4 - LP = LPTR(LP) - NL = LIST(LP) - IF (NL .NE. N1FRST) GO TO 3 -C -C All neighbors of N1 are strictly left of N1->N2. -C - GO TO 5 -C -C NL = LIST(LP) LEFT N2->N1. Set NR to NL and NL to the -C following neighbor of N1. -C - 4 NR = NL - LP = LPTR(LP) - NL = ABS(LIST(LP)) - IF (LEFT(X1,Y1,Z1,X2,Y2,Z2,X(NL),Y(NL),Z(NL)) ) THEN -C -C NL LEFT N1->N2 and NR LEFT N2->N1. The Forward tests -C are employed to avoid an error associated with -C collinear nodes. -C - DP12 = X1*X2 + Y1*Y2 + Z1*Z2 - DP1L = X1*X(NL) + Y1*Y(NL) + Z1*Z(NL) - DP2L = X2*X(NL) + Y2*Y(NL) + Z2*Z(NL) - DP1R = X1*X(NR) + Y1*Y(NR) + Z1*Z(NR) - DP2R = X2*X(NR) + Y2*Y(NR) + Z2*Z(NR) - IF ( (DP2L-DP12*DP1L .GE. 0. .OR. - . DP2R-DP12*DP1R .GE. 0.) .AND. - . (DP1L-DP12*DP2L .GE. 0. .OR. - . DP1R-DP12*DP2R .GE. 0.) ) GO TO 6 -C -C NL-NR does not intersect N1-N2. However, there is -C another candidate for the first arc if NL lies on -C the line N1-N2. -C - IF ( .NOT. LEFT(X2,Y2,Z2,X1,Y1,Z1,X(NL),Y(NL), - . Z(NL)) ) GO TO 5 - ENDIF -C -C Bottom of loop. -C - IF (NL .NE. N1FRST) GO TO 4 -C -C Either the triangulation is invalid or N1-N2 lies on the -C convex hull boundary and an edge NR->NL (opposite N1 and -C intersecting N1-N2) was not found due to floating point -C error. Try interchanging N1 and N2 -- NIT > 0 iff this -C has already been done. -C - 5 IF (NIT .GT. 0) GO TO 33 - NIT = 1 - N1 = N2 - N2 = IN1 - GO TO 2 -C -C Store the ordered sequence of intersecting edges NL->NR in -C IWK(1,IWL)->IWK(2,IWL). -C - 6 IWL = IWL + 1 - IF (IWL .GT. IWEND) GO TO 32 - IWK(1,IWL) = NL - IWK(2,IWL) = NR -C -C Set NEXT to the neighbor of NL which follows NR. -C - LPL = LEND(NL) - LP = LPTR(LPL) -C -C Find NR as a neighbor of NL. The search begins with -C the first neighbor. -C - 7 IF (LIST(LP) .EQ. NR) GO TO 8 - LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 7 -C -C NR must be the last neighbor, and NL->NR cannot be a -C boundary edge. -C - IF (LIST(LP) .NE. NR) GO TO 33 -C -C Set NEXT to the neighbor following NR, and test for -C termination of the store loop. -C - 8 LP = LPTR(LP) - NEXT = ABS(LIST(LP)) - IF (NEXT .EQ. N2) GO TO 9 -C -C Set NL or NR to NEXT. -C - IF ( LEFT(X1,Y1,Z1,X2,Y2,Z2,X(NEXT),Y(NEXT),Z(NEXT)) ) - . THEN - NL = NEXT - ELSE - NR = NEXT - ENDIF - GO TO 6 -C -C IWL is the number of arcs which intersect N1-N2. -C Store LWK. -C - 9 LWK = IWL - IWEND = IWL -C -C Initialize for edge swapping loop -- all possible swaps -C are applied (even if the new arc again intersects -C N1-N2), arcs to the left of N1->N2 are stored in the -C left portion of IWK, and arcs to the right are stored in -C the right portion. IWF and IWL index the first and last -C intersecting arcs. -C - IWF = 1 -C -C Top of loop -- set N0 to N1 and NL->NR to the first edge. -C IWC points to the arc currently being processed. LFT -C .LE. 0 iff N0 LEFT N1->N2. -C - 10 LFT = 0 - N0 = N1 - X0 = X1 - Y0 = Y1 - Z0 = Z1 - NL = IWK(1,IWF) - NR = IWK(2,IWF) - IWC = IWF -C -C Set NEXT to the node opposite NL->NR unless IWC is the -C last arc. -C - 11 IF (IWC .EQ. IWL) GO TO 21 - IWCP1 = IWC + 1 - NEXT = IWK(1,IWCP1) - IF (NEXT .NE. NL) GO TO 16 - NEXT = IWK(2,IWCP1) -C -C NEXT RIGHT N1->N2 and IWC .LT. IWL. Test for a possible -C swap. -C - IF ( .NOT. LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 14 - IF (LFT .GE. 0) GO TO 12 - IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X(NEXT), - . 
Y(NEXT),Z(NEXT)) ) GO TO 14 -C -C Replace NL->NR with N0->NEXT. -C - CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = N0 - IWK(2,IWC) = NEXT - GO TO 15 -C -C Swap NL-NR for N0-NEXT, shift columns IWC+1,...,IWL to -C the left, and store N0-NEXT in the right portion of -C IWK. -C - 12 CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - DO 13 I = IWCP1,IWL - IWK(1,I-1) = IWK(1,I) - IWK(2,I-1) = IWK(2,I) - 13 CONTINUE - IWK(1,IWL) = N0 - IWK(2,IWL) = NEXT - IWL = IWL - 1 - NR = NEXT - GO TO 11 -C -C A swap is not possible. Set N0 to NR. -C - 14 N0 = NR - X0 = X(N0) - Y0 = Y(N0) - Z0 = Z(N0) - LFT = 1 -C -C Advance to the next arc. -C - 15 NR = NEXT - IWC = IWC + 1 - GO TO 11 -C -C NEXT LEFT N1->N2, NEXT .NE. N2, and IWC .LT. IWL. -C Test for a possible swap. -C - 16 IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 19 - IF (LFT .LE. 0) GO TO 17 - IF ( .NOT. LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X(NEXT), - . Y(NEXT),Z(NEXT)) ) GO TO 19 -C -C Replace NL->NR with NEXT->N0. -C - CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = NEXT - IWK(2,IWC) = N0 - GO TO 20 -C -C Swap NL-NR for N0-NEXT, shift columns IWF,...,IWC-1 to -C the right, and store N0-NEXT in the left portion of -C IWK. -C - 17 CALL SWAP (NEXT,N0,NL,NR, LIST,LPTR,LEND, LP21) - DO 18 I = IWC-1,IWF,-1 - IWK(1,I+1) = IWK(1,I) - IWK(2,I+1) = IWK(2,I) - 18 CONTINUE - IWK(1,IWF) = N0 - IWK(2,IWF) = NEXT - IWF = IWF + 1 - GO TO 20 -C -C A swap is not possible. Set N0 to NL. -C - 19 N0 = NL - X0 = X(N0) - Y0 = Y(N0) - Z0 = Z(N0) - LFT = -1 -C -C Advance to the next arc. -C - 20 NL = NEXT - IWC = IWC + 1 - GO TO 11 -C -C N2 is opposite NL->NR (IWC = IWL). -C - 21 IF (N0 .EQ. N1) GO TO 24 - IF (LFT .LT. 0) GO TO 22 -C -C N0 RIGHT N1->N2. Test for a possible swap. -C - IF ( .NOT. LEFT(X0,Y0,Z0,X(NR),Y(NR),Z(NR),X2,Y2,Z2) ) - . GO TO 10 -C -C Swap NL-NR for N0-N2 and store N0-N2 in the right -C portion of IWK. -C - CALL SWAP (N2,N0,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWL) = N0 - IWK(2,IWL) = N2 - IWL = IWL - 1 - GO TO 10 -C -C N0 LEFT N1->N2. Test for a possible swap. -C - 22 IF ( .NOT. LEFT(X(NL),Y(NL),Z(NL),X0,Y0,Z0,X2,Y2,Z2) ) - . GO TO 10 -C -C Swap NL-NR for N0-N2, shift columns IWF,...,IWL-1 to the -C right, and store N0-N2 in the left portion of IWK. -C - CALL SWAP (N2,N0,NL,NR, LIST,LPTR,LEND, LP21) - I = IWL - 23 IWK(1,I) = IWK(1,I-1) - IWK(2,I) = IWK(2,I-1) - I = I - 1 - IF (I .GT. IWF) GO TO 23 - IWK(1,IWF) = N0 - IWK(2,IWF) = N2 - IWF = IWF + 1 - GO TO 10 -C -C IWF = IWC = IWL. Swap out the last arc for N1-N2 and -C store zeros in IWK. -C - 24 CALL SWAP (N2,N1,NL,NR, LIST,LPTR,LEND, LP21) - IWK(1,IWC) = 0 - IWK(2,IWC) = 0 -C -C Optimization procedure -- -C - IER = 0 - IF (IWC .GT. 1) THEN -C -C Optimize the set of new arcs to the left of IN1->IN2. -C - NIT = 4*(IWC-1) - CALL OPTIM (X,Y,Z,IWC-1, LIST,LPTR,LEND,NIT, - . IWK, IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 34 - IF (IERR .EQ. 1) IER = 5 - ENDIF - IF (IWC .LT. IWEND) THEN -C -C Optimize the set of new arcs to the right of IN1->IN2. -C - NIT = 4*(IWEND-IWC) - CALL OPTIM (X,Y,Z,IWEND-IWC, LIST,LPTR,LEND,NIT, - . IWK(1,IWC+1), IERR) - IF (IERR .NE. 0 .AND. IERR .NE. 1) GO TO 34 - IF (IERR .EQ. 1) GO TO 35 - ENDIF - IF (IER .EQ. 5) GO TO 35 -C -C Successful termination (IER = 0). -C - RETURN -C -C IN1 and IN2 were adjacent on input. -C - 30 IER = 0 - RETURN -C -C Invalid input parameter. -C - 31 IER = 1 - RETURN -C -C Insufficient space reserved for IWK. 
-C - 32 IER = 2 - RETURN -C -C Invalid triangulation data structure or collinear nodes -C on convex hull boundary. -C - 33 IER = 3 - WRITE (*,130) IN1, IN2 - 130 FORMAT (//5X,'*** Error in EDGE: Invalid triangula', - . 'tion or null triangles on boundary'/ - . 9X,'IN1 =',I4,', IN2=',I4/) - RETURN -C -C Error flag (other than 1) returned by OPTIM. -C - 34 IER = 4 - WRITE (*,140) NIT, IERR - 140 FORMAT (//5X,'*** Error in OPTIM (called from EDGE):', - . ' NIT = ',I4,', IER = ',I1,' ***'/) - RETURN -C -C Error flag 1 returned by OPTIM. -C - 35 IER = 5 - RETURN - END SUBROUTINE - SUBROUTINE GETNP (X,Y,Z,LIST,LPTR,LEND,L, NPTS, DF, - . IER) - INTEGER LIST(*), LPTR(*), LEND(*), L, NPTS(L), IER - REAL X(*), Y(*), Z(*), DF -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C Given a Delaunay triangulation of N nodes on the unit -C sphere and an array NPTS containing the indexes of L-1 -C nodes ordered by angular distance from NPTS(1), this sub- -C routine sets NPTS(L) to the index of the next node in the -C sequence -- the node, other than NPTS(1),...,NPTS(L-1), -C that is closest to NPTS(1). Thus, the ordered sequence -C of K closest nodes to N1 (including N1) may be determined -C by K-1 calls to GETNP with NPTS(1) = N1 and L = 2,3,...,K -C for K .GE. 2. -C -C The algorithm uses the property of a Delaunay triangula- -C tion that the K-th closest node to N1 is a neighbor of one -C of the K-1 closest nodes to N1. -C -C -C On input: -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C LIST,LPTR,LEND = Triangulation data structure. Re- -C fer to Subroutine TRMESH. -C -C L = Number of nodes in the sequence on output. 2 -C .LE. L .LE. N. -C -C The above parameters are not altered by this routine. -C -C NPTS = Array of length .GE. L containing the indexes -C of the L-1 closest nodes to NPTS(1) in the -C first L-1 locations. -C -C On output: -C -C NPTS = Array updated with the index of the L-th -C closest node to NPTS(1) in position L unless -C IER = 1. -C -C DF = Value of an increasing function (negative cos- -C ine) of the angular distance between NPTS(1) -C and NPTS(L) unless IER = 1. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if L < 2. -C -C Modules required by GETNP: None -C -C Intrinsic function called by GETNP: ABS -C -C*********************************************************** -C - INTEGER I, LM1, LP, LPL, N1, NB, NI, NP - REAL DNB, DNP, X1, Y1, Z1 -C -C Local parameters: -C -C DNB,DNP = Negative cosines of the angular distances from -C N1 to NB and to NP, respectively -C I = NPTS index and DO-loop index -C LM1 = L-1 -C LP = LIST pointer of a neighbor of NI -C LPL = Pointer to the last neighbor of NI -C N1 = NPTS(1) -C NB = Neighbor of NI and candidate for NP -C NI = NPTS(I) -C NP = Candidate for NPTS(L) -C X1,Y1,Z1 = Coordinates of N1 -C - LM1 = L - 1 - IF (LM1 .LT. 1) GO TO 6 - IER = 0 -C -C Store N1 = NPTS(1) and mark the elements of NPTS. -C - N1 = NPTS(1) - X1 = X(N1) - Y1 = Y(N1) - Z1 = Z(N1) - DO 1 I = 1,LM1 - NI = NPTS(I) - LEND(NI) = -LEND(NI) - 1 CONTINUE -C -C Candidates for NP = NPTS(L) are the unmarked neighbors -C of nodes in NPTS. DNP is initially greater than -cos(PI) -C (the maximum distance). -C - DNP = 2. -C -C Loop on nodes NI in NPTS. -C - DO 4 I = 1,LM1 - NI = NPTS(I) - LPL = -LEND(NI) - LP = LPL -C -C Loop on neighbors NB of NI. 
-C - 2 NB = ABS(LIST(LP)) - IF (LEND(NB) .LT. 0) GO TO 3 -C -C NB is an unmarked neighbor of NI. Replace NP if NB is -C closer to N1. -C - DNB = -(X(NB)*X1 + Y(NB)*Y1 + Z(NB)*Z1) - IF (DNB .GE. DNP) GO TO 3 - NP = NB - DNP = DNB - 3 LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 2 - 4 CONTINUE - NPTS(L) = NP - DF = DNP -C -C Unmark the elements of NPTS. -C - DO 5 I = 1,LM1 - NI = NPTS(I) - LEND(NI) = -LEND(NI) - 5 CONTINUE - RETURN -C -C L is outside its valid range. -C - 6 IER = 1 - RETURN - END SUBROUTINE - SUBROUTINE INSERT (K,LP, LIST,LPTR,LNEW ) - INTEGER K, LP, LIST(*), LPTR(*), LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine inserts K as a neighbor of N1 following -C N2, where LP is the LIST pointer of N2 as a neighbor of -C N1. Note that, if N2 is the last neighbor of N1, K will -C become the first neighbor (even if N1 is a boundary node). -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C K = Index of the node to be inserted. -C -C LP = LIST pointer of N2 as a neighbor of N1. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LNEW = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C On output: -C -C LIST,LPTR,LNEW = Data structure updated with the -C addition of node K. -C -C Modules required by INSERT: None -C -C*********************************************************** -C - INTEGER LSAV -C - LSAV = LPTR(LP) - LPTR(LP) = LNEW - LIST(LNEW) = K - LPTR(LNEW) = LSAV - LNEW = LNEW + 1 - RETURN - END SUBROUTINE - LOGICAL FUNCTION INSIDE (P,LV,XV,YV,ZV,NV,LISTV, IER) - INTEGER LV, NV, LISTV(NV), IER - REAL P(3), XV(LV), YV(LV), ZV(LV) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 12/27/93 -C -C This function locates a point P relative to a polygonal -C region R on the surface of the unit sphere, returning -C INSIDE = TRUE if and only if P is contained in R. R is -C defined by a cyclically ordered sequence of vertices which -C form a positively-oriented simple closed curve. Adjacent -C vertices need not be distinct but the curve must not be -C self-intersecting. Also, while polygon edges are by defi- -C nition restricted to a single hemisphere, R is not so -C restricted. Its interior is the region to the left as the -C vertices are traversed in order. -C -C The algorithm consists of selecting a point Q in R and -C then finding all points at which the great circle defined -C by P and Q intersects the boundary of R. P lies inside R -C if and only if there is an even number of intersection -C points between Q and P. Q is taken to be a point immedi- -C ately to the left of a directed boundary edge -- the first -C one that results in no consistency-check failures. -C -C If P is close to the polygon boundary, the problem is -C ill-conditioned and the decision may be incorrect. Also, -C an incorrect decision may result from a poor choice of Q -C (if, for example, a boundary edge lies on the great cir- -C cle defined by P and Q). A more reliable result could be -C obtained by a sequence of calls to INSIDE with the ver- -C tices cyclically permuted before each call (to alter the -C choice of Q). 
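As a hedged illustration of the crossing-parity argument described above for INSIDE, the short Python sketch below applies the same even/odd counting idea in the plane (a ray-crossing point-in-polygon test). It is an editorial example only, not part of the STRIPACK source; the function name and the choice of a horizontal ray are arbitrary, and INSIDE itself counts great-circle intersections on the sphere between P and a reference point Q chosen just inside R (so "inside" there corresponds to an even count).

    def point_in_polygon(px, py, vertices):
        # Even/odd rule: (px, py) is inside a simple polygon iff a ray from
        # the point crosses the boundary an odd number of times.
        inside = False
        n = len(vertices)
        for k in range(n):
            x1, y1 = vertices[k]
            x2, y2 = vertices[(k + 1) % n]
            # Count edges that cross the horizontal ray to the right of (px, py).
            if (y1 > py) != (y2 > py):
                x_cross = x1 + (py - y1) * (x2 - x1) / (y2 - y1)
                if x_cross > px:
                    inside = not inside
        return inside

    # Example: the unit square contains (0.5, 0.5) but not (1.5, 0.5).
    square = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
    assert point_in_polygon(0.5, 0.5, square)
    assert not point_in_polygon(1.5, 0.5, square)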
-C -C -C On input: -C -C P = Array of length 3 containing the Cartesian -C coordinates of the point (unit vector) to be -C located. -C -C LV = Length of arrays XV, YV, and ZV. -C -C XV,YV,ZV = Arrays of length LV containing the Carte- -C sian coordinates of unit vectors (points -C on the unit sphere). These values are -C not tested for validity. -C -C NV = Number of vertices in the polygon. 3 .LE. NV -C .LE. LV. -C -C LISTV = Array of length NV containing the indexes -C (for XV, YV, and ZV) of a cyclically-ordered -C (and CCW-ordered) sequence of vertices that -C define R. The last vertex (indexed by -C LISTV(NV)) is followed by the first (indexed -C by LISTV(1)). LISTV entries must be in the -C range 1 to LV. -C -C Input parameters are not altered by this function. -C -C On output: -C -C INSIDE = TRUE if and only if P lies inside R unless -C IER .NE. 0, in which case the value is not -C altered. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LV or NV is outside its valid -C range. -C IER = 2 if a LISTV entry is outside its valid -C range. -C IER = 3 if the polygon boundary was found to -C be self-intersecting. This error will -C not necessarily be detected. -C IER = 4 if every choice of Q (one for each -C boundary edge) led to failure of some -C internal consistency check. The most -C likely cause of this error is invalid -C input: P = (0,0,0), a null or self- -C intersecting polygon, etc. -C -C Module required by INSIDE: INTRSC -C -C Intrinsic function called by INSIDE: SQRT -C -C*********************************************************** -C - INTEGER I1, I2, IERR, IMX, K, K0, N, NI - LOGICAL EVEN, LFT1, LFT2, PINR, QINR - REAL B(3), BP, BQ, CN(3), D, EPS, PN(3), Q(3), - . QN(3), QNRM, V1(3), V2(3), VN(3), VNRM -C -C Local parameters: -C -C B = Intersection point between the boundary and -C the great circle defined by P and Q -C BP,BQ = and , respectively, maximized over -C intersection points B that lie between P and -C Q (on the shorter arc) -- used to find the -C closest intersection points to P and Q -C CN = Q X P = normal to the plane of P and Q -C D = Dot product or -C EPS = Parameter used to define Q as the point whose -C orthogonal distance to (the midpoint of) -C boundary edge V1->V2 is approximately EPS/ -C (2*Cos(A/2)), where = Cos(A). 
-C EVEN = TRUE iff an even number of intersection points -C lie between P and Q (on the shorter arc) -C I1,I2 = Indexes (LISTV elements) of a pair of adjacent -C boundary vertices (endpoints of a boundary -C edge) -C IERR = Error flag for calls to INTRSC (not tested) -C IMX = Local copy of LV and maximum value of I1 and -C I2 -C K = DO-loop index and LISTV index -C K0 = LISTV index of the first endpoint of the -C boundary edge used to compute Q -C LFT1,LFT2 = Logical variables associated with I1 and I2 in -C the boundary traversal: TRUE iff the vertex -C is strictly to the left of Q->P ( > 0) -C N = Local copy of NV -C NI = Number of intersections (between the boundary -C curve and the great circle P-Q) encountered -C PINR = TRUE iff P is to the left of the directed -C boundary edge associated with the closest -C intersection point to P that lies between P -C and Q (a left-to-right intersection as -C viewed from Q), or there is no intersection -C between P and Q (on the shorter arc) -C PN,QN = P X CN and CN X Q, respectively: used to -C locate intersections B relative to arc Q->P -C Q = (V1 + V2 + EPS*VN/VNRM)/QNRM, where V1->V2 is -C the boundary edge indexed by LISTV(K0) -> -C LISTV(K0+1) -C QINR = TRUE iff Q is to the left of the directed -C boundary edge associated with the closest -C intersection point to Q that lies between P -C and Q (a right-to-left intersection as -C viewed from Q), or there is no intersection -C between P and Q (on the shorter arc) -C QNRM = Euclidean norm of V1+V2+EPS*VN/VNRM used to -C compute (normalize) Q -C V1,V2 = Vertices indexed by I1 and I2 in the boundary -C traversal -C VN = V1 X V2, where V1->V2 is the boundary edge -C indexed by LISTV(K0) -> LISTV(K0+1) -C VNRM = Euclidean norm of VN -C - DATA EPS/1.E-3/ -C -C Store local parameters, test for error 1, and initialize -C K0. -C - IMX = LV - N = NV - IF (N .LT. 3 .OR. N .GT. IMX) GO TO 11 - K0 = 0 - I1 = LISTV(1) - IF (I1 .LT. 1 .OR. I1 .GT. IMX) GO TO 12 -C -C Increment K0 and set Q to a point immediately to the left -C of the midpoint of edge V1->V2 = LISTV(K0)->LISTV(K0+1): -C Q = (V1 + V2 + EPS*VN/VNRM)/QNRM, where VN = V1 X V2. -C - 1 K0 = K0 + 1 - IF (K0 .GT. N) GO TO 14 - I1 = LISTV(K0) - IF (K0 .LT. N) THEN - I2 = LISTV(K0+1) - ELSE - I2 = LISTV(1) - ENDIF - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - VN(1) = YV(I1)*ZV(I2) - ZV(I1)*YV(I2) - VN(2) = ZV(I1)*XV(I2) - XV(I1)*ZV(I2) - VN(3) = XV(I1)*YV(I2) - YV(I1)*XV(I2) - VNRM = SQRT(VN(1)*VN(1) + VN(2)*VN(2) + VN(3)*VN(3)) - IF (VNRM .EQ. 0.) GO TO 1 - Q(1) = XV(I1) + XV(I2) + EPS*VN(1)/VNRM - Q(2) = YV(I1) + YV(I2) + EPS*VN(2)/VNRM - Q(3) = ZV(I1) + ZV(I2) + EPS*VN(3)/VNRM - QNRM = SQRT(Q(1)*Q(1) + Q(2)*Q(2) + Q(3)*Q(3)) - Q(1) = Q(1)/QNRM - Q(2) = Q(2)/QNRM - Q(3) = Q(3)/QNRM -C -C Compute CN = Q X P, PN = P X CN, and QN = CN X Q. -C - CN(1) = Q(2)*P(3) - Q(3)*P(2) - CN(2) = Q(3)*P(1) - Q(1)*P(3) - CN(3) = Q(1)*P(2) - Q(2)*P(1) - IF (CN(1) .EQ. 0. .AND. CN(2) .EQ. 0. .AND. - . CN(3) .EQ. 0.) GO TO 1 - PN(1) = P(2)*CN(3) - P(3)*CN(2) - PN(2) = P(3)*CN(1) - P(1)*CN(3) - PN(3) = P(1)*CN(2) - P(2)*CN(1) - QN(1) = CN(2)*Q(3) - CN(3)*Q(2) - QN(2) = CN(3)*Q(1) - CN(1)*Q(3) - QN(3) = CN(1)*Q(2) - CN(2)*Q(1) -C -C Initialize parameters for the boundary traversal. -C - NI = 0 - EVEN = .TRUE. - BP = -2. - BQ = -2. - PINR = .TRUE. - QINR = .TRUE. - I2 = LISTV(N) - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - LFT2 = CN(1)*XV(I2) + CN(2)*YV(I2) + - . CN(3)*ZV(I2) .GT. 0. -C -C Loop on boundary arcs I1->I2. 
-C - DO 2 K = 1,N - I1 = I2 - LFT1 = LFT2 - I2 = LISTV(K) - IF (I2 .LT. 1 .OR. I2 .GT. IMX) GO TO 12 - LFT2 = CN(1)*XV(I2) + CN(2)*YV(I2) + - . CN(3)*ZV(I2) .GT. 0. - IF (LFT1 .EQV. LFT2) GO TO 2 -C -C I1 and I2 are on opposite sides of Q->P. Compute the -C point of intersection B. -C - NI = NI + 1 - V1(1) = XV(I1) - V1(2) = YV(I1) - V1(3) = ZV(I1) - V2(1) = XV(I2) - V2(2) = YV(I2) - V2(3) = ZV(I2) - CALL INTRSC (V1,V2,CN, B,IERR) -C -C B is between Q and P (on the shorter arc) iff -C B Forward Q->P and B Forward P->Q iff -C > 0 and > 0. -C - IF (B(1)*QN(1) + B(2)*QN(2) + B(3)*QN(3) .GT. 0. - . .AND. - . B(1)*PN(1) + B(2)*PN(2) + B(3)*PN(3) .GT. 0.) - . THEN -C -C Update EVEN, BQ, QINR, BP, and PINR. -C - EVEN = .NOT. EVEN - D = B(1)*Q(1) + B(2)*Q(2) + B(3)*Q(3) - IF (D .GT. BQ) THEN - BQ = D - QINR = LFT2 - ENDIF - D = B(1)*P(1) + B(2)*P(2) + B(3)*P(3) - IF (D .GT. BP) THEN - BP = D - PINR = LFT1 - ENDIF - ENDIF - 2 CONTINUE -C -C Test for consistency: NI must be even and QINR must be -C TRUE. -C - IF (NI .NE. 2*(NI/2) .OR. .NOT. QINR) GO TO 1 -C -C Test for error 3: different values of PINR and EVEN. -C - IF (PINR .NEQV. EVEN) GO TO 13 -C -C No error encountered. -C - IER = 0 - INSIDE = EVEN - RETURN -C -C LV or NV is outside its valid range. -C - 11 IER = 1 - RETURN -C -C A LISTV entry is outside its valid range. -C - 12 IER = 2 - RETURN -C -C The polygon boundary is self-intersecting. -C - 13 IER = 3 - RETURN -C -C Consistency tests failed for all values of Q. -C - 14 IER = 4 - RETURN - END FUNCTION - SUBROUTINE INTADD (KK,I1,I2,I3, LIST,LPTR,LEND,LNEW ) - INTEGER KK, I1, I2, I3, LIST(*), LPTR(*), LEND(*), - . LNEW -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/17/96 -C -C This subroutine adds an interior node to a triangulation -C of a set of points on the unit sphere. The data structure -C is updated with the insertion of node KK into the triangle -C whose vertices are I1, I2, and I3. No optimization of the -C triangulation is performed. -C -C This routine is identical to the similarly named routine -C in TRIPACK. -C -C -C On input: -C -C KK = Index of the node to be inserted. KK .GE. 1 -C and KK must not be equal to I1, I2, or I3. -C -C I1,I2,I3 = Indexes of the counterclockwise-ordered -C sequence of vertices of a triangle which -C contains node KK. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND,LNEW = Data structure defining the -C triangulation. Refer to Sub- -C routine TRMESH. Triangle -C (I1,I2,I3) must be included -C in the triangulation. -C -C On output: -C -C LIST,LPTR,LEND,LNEW = Data structure updated with -C the addition of node KK. KK -C will be connected to nodes I1, -C I2, and I3. -C -C Modules required by INTADD: INSERT, LSTPTR -C -C*********************************************************** -C - INTEGER K, LP, N1, N2, N3 -C -C Local parameters: -C -C K = Local copy of KK -C LP = LIST pointer -C N1,N2,N3 = Local copies of I1, I2, and I3 -C - K = KK -C -C Initialization. -C - N1 = I1 - N2 = I2 - N3 = I3 -C -C Add K as a neighbor of I1, I2, and I3. -C - LP = LSTPTR(LEND(N1),N2,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - LP = LSTPTR(LEND(N2),N3,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) - LP = LSTPTR(LEND(N3),N1,LIST,LPTR) - CALL INSERT (K,LP, LIST,LPTR,LNEW ) -C -C Add I1, I2, and I3 as neighbors of K. 
-C - LIST(LNEW) = N1 - LIST(LNEW+1) = N2 - LIST(LNEW+2) = N3 - LPTR(LNEW) = LNEW + 1 - LPTR(LNEW+1) = LNEW + 2 - LPTR(LNEW+2) = LNEW - LEND(K) = LNEW + 2 - LNEW = LNEW + 3 - RETURN - END SUBROUTINE - SUBROUTINE INTRSC (P1,P2,CN, P,IER) - INTEGER IER - REAL P1(3), P2(3), CN(3), P(3) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/19/90 -C -C Given a great circle C and points P1 and P2 defining an -C arc A on the surface of the unit sphere, where A is the -C shorter of the two portions of the great circle C12 assoc- -C iated with P1 and P2, this subroutine returns the point -C of intersection P between C and C12 that is closer to A. -C Thus, if P1 and P2 lie in opposite hemispheres defined by -C C, P is the point of intersection of C with A. -C -C -C On input: -C -C P1,P2 = Arrays of length 3 containing the Cartesian -C coordinates of unit vectors. -C -C CN = Array of length 3 containing the Cartesian -C coordinates of a nonzero vector which defines C -C as the intersection of the plane whose normal -C is CN with the unit sphere. Thus, if C is to -C be the great circle defined by P and Q, CN -C should be P X Q. -C -C The above parameters are not altered by this routine. -C -C P = Array of length 3. -C -C On output: -C -C P = Point of intersection defined above unless IER -C .NE. 0, in which case P is not altered. -C -C IER = Error indicator. -C IER = 0 if no errors were encountered. -C IER = 1 if = . This occurs -C iff P1 = P2 or CN = 0 or there are -C two intersection points at the same -C distance from A. -C IER = 2 if P2 = -P1 and the definition of A is -C therefore ambiguous. -C -C Modules required by INTRSC: None -C -C Intrinsic function called by INTRSC: SQRT -C -C*********************************************************** -C - INTEGER I - REAL D1, D2, PP(3), PPN, T -C -C Local parameters: -C -C D1 = -C D2 = -C I = DO-loop index -C PP = P1 + T*(P2-P1) = Parametric representation of the -C line defined by P1 and P2 -C PPN = Norm of PP -C T = D1/(D1-D2) = Parameter value chosen so that PP lies -C in the plane of C -C - D1 = CN(1)*P1(1) + CN(2)*P1(2) + CN(3)*P1(3) - D2 = CN(1)*P2(1) + CN(2)*P2(2) + CN(3)*P2(3) -C - IF (D1 .EQ. D2) THEN - IER = 1 - RETURN - ENDIF -C -C Solve for T such that = 0 and compute PP and PPN. -C - T = D1/(D1-D2) - PPN = 0. - DO 1 I = 1,3 - PP(I) = P1(I) + T*(P2(I)-P1(I)) - PPN = PPN + PP(I)*PP(I) - 1 CONTINUE -C -C PPN = 0 iff PP = 0 iff P2 = -P1 (and T = .5). -C - IF (PPN .EQ. 0.) THEN - IER = 2 - RETURN - ENDIF - PPN = SQRT(PPN) -C -C Compute P = PP/PPN. -C - DO 2 I = 1,3 - P(I) = PP(I)/PPN - 2 CONTINUE - IER = 0 - RETURN - END SUBROUTINE - INTEGER FUNCTION JRAND (N, IX,IY,IZ ) - INTEGER N, IX, IY, IZ -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C This function returns a uniformly distributed pseudo- -C random integer in the range 1 to N. -C -C -C On input: -C -C N = Maximum value to be returned. -C -C N is not altered by this function. -C -C IX,IY,IZ = Integer seeds initialized to values in -C the range 1 to 30,000 before the first -C call to JRAND, and not altered between -C subsequent calls (unless a sequence of -C random numbers is to be repeated by -C reinitializing the seeds). -C -C On output: -C -C IX,IY,IZ = Updated integer seeds. 
-C -C JRAND = Random integer in the range 1 to N. -C -C Reference: B. A. Wichmann and I. D. Hill, "An Efficient -C and Portable Pseudo-random Number Generator", -C Applied Statistics, Vol. 31, No. 2, 1982, -C pp. 188-190. -C -C Modules required by JRAND: None -C -C Intrinsic functions called by JRAND: INT, MOD, REAL -C -C*********************************************************** -C - REAL U, X -C -C Local parameters: -C -C U = Pseudo-random number uniformly distributed in the -C interval (0,1). -C X = Pseudo-random number in the range 0 to 3 whose frac- -C tional part is U. -C - IX = MOD(171*IX,30269) - IY = MOD(172*IY,30307) - IZ = MOD(170*IZ,30323) - X = (REAL(IX)/30269.) + (REAL(IY)/30307.) + - . (REAL(IZ)/30323.) - U = X - INT(X) - JRAND = REAL(N)*U + 1. - RETURN - END FUNCTION - LOGICAL FUNCTION LEFT (X1,Y1,Z1,X2,Y2,Z2,X0,Y0,Z0) - REAL X1, Y1, Z1, X2, Y2, Z2, X0, Y0, Z0 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function determines whether node N0 is in the -C (closed) left hemisphere defined by the plane containing -C N1, N2, and the origin, where left is defined relative to -C an observer at N1 facing N2. -C -C -C On input: -C -C X1,Y1,Z1 = Coordinates of N1. -C -C X2,Y2,Z2 = Coordinates of N2. -C -C X0,Y0,Z0 = Coordinates of N0. -C -C Input parameters are not altered by this function. -C -C On output: -C -C LEFT = TRUE if and only if N0 is in the closed -C left hemisphere. -C -C Modules required by LEFT: None -C -C*********************************************************** -C -C LEFT = TRUE iff = det(N0,N1,N2) .GE. 0. -C - LEFT = X0*(Y1*Z2-Y2*Z1) - Y0*(X1*Z2-X2*Z1) + - . Z0*(X1*Y2-X2*Y1) .GE. 0. - RETURN - END FUNCTION - INTEGER FUNCTION LSTPTR (LPL,NB,LIST,LPTR) - INTEGER LPL, NB, LIST(*), LPTR(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function returns the index (LIST pointer) of NB in -C the adjacency list for N0, where LPL = LEND(N0). -C -C This function is identical to the similarly named -C function in TRIPACK. -C -C -C On input: -C -C LPL = LEND(N0) -C -C NB = Index of the node whose pointer is to be re- -C turned. NB must be connected to N0. -C -C LIST,LPTR = Data structure defining the triangula- -C tion. Refer to Subroutine TRMESH. -C -C Input parameters are not altered by this function. -C -C On output: -C -C LSTPTR = Pointer such that LIST(LSTPTR) = NB or -C LIST(LSTPTR) = -NB, unless NB is not a -C neighbor of N0, in which case LSTPTR = LPL. -C -C Modules required by LSTPTR: None -C -C*********************************************************** -C - INTEGER LP, ND -C -C Local parameters: -C -C LP = LIST pointer -C ND = Nodal index -C - LP = LPTR(LPL) - 1 ND = LIST(LP) - IF (ND .EQ. NB) GO TO 2 - LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 1 -C - 2 LSTPTR = LP - RETURN - END FUNCTION - INTEGER FUNCTION NBCNT (LPL,LPTR) - INTEGER LPL, LPTR(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/15/96 -C -C This function returns the number of neighbors of a node -C N0 in a triangulation created by Subroutine TRMESH. -C -C This function is identical to the similarly named -C function in TRIPACK. 
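The LEFT test above (and the identical statement function DET used later in TRFIND) reduces to the sign of a scalar triple product. A minimal Python transcription is given below purely for illustration; it is not part of the STRIPACK source.

    # N0 is in the closed left hemisphere defined by the plane through the
    # origin, N1 and N2 (left as seen by an observer at N1 facing N2) iff
    # <N0, N1 x N2> = det(N0, N1, N2) >= 0, exactly as in LEFT above.
    def left(n1, n2, n0):
        x1, y1, z1 = n1
        x2, y2, z2 = n2
        x0, y0, z0 = n0
        return (x0 * (y1 * z2 - y2 * z1)
                - y0 * (x1 * z2 - x2 * z1)
                + z0 * (x1 * y2 - x2 * y1)) >= 0.0

    # Example: the north pole is to the left of the equatorial arc
    # (1,0,0) -> (0,1,0).
    assert left((1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0))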
-C -C -C On input: -C -C LPL = LIST pointer to the last neighbor of N0 -- -C LPL = LEND(N0). -C -C LPTR = Array of pointers associated with LIST. -C -C Input parameters are not altered by this function. -C -C On output: -C -C NBCNT = Number of neighbors of N0. -C -C Modules required by NBCNT: None -C -C*********************************************************** -C - INTEGER K, LP -C -C Local parameters: -C -C K = Counter for computing the number of neighbors -C LP = LIST pointer -C - LP = LPL - K = 1 -C - 1 LP = LPTR(LP) - IF (LP .EQ. LPL) GO TO 2 - K = K + 1 - GO TO 1 -C - 2 NBCNT = K - RETURN - END FUNCTION - INTEGER FUNCTION NEARND (P,IST,N,X,Y,Z,LIST,LPTR, - . LEND, AL) - INTEGER IST, N, LIST(*), LPTR(*), LEND(N) - REAL P(3), X(N), Y(N), Z(N), AL -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/28/98 -C -C Given a point P on the surface of the unit sphere and a -C Delaunay triangulation created by Subroutine TRMESH, this -C function returns the index of the nearest triangulation -C node to P. -C -C The algorithm consists of implicitly adding P to the -C triangulation, finding the nearest neighbor to P, and -C implicitly deleting P from the triangulation. Thus, it -C is based on the fact that, if P is a node in a Delaunay -C triangulation, the nearest node to P is a neighbor of P. -C -C -C On input: -C -C P = Array of length 3 containing the Cartesian coor- -C dinates of the point P to be located relative to -C the triangulation. It is assumed without a test -C that P(1)**2 + P(2)**2 + P(3)**2 = 1. -C -C IST = Index of a node at which TRFIND begins the -C search. Search time depends on the proximity -C of this node to P. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to TRMESH. -C -C Input parameters are not altered by this function. -C -C On output: -C -C NEARND = Nodal index of the nearest node to P, or 0 -C if N < 3 or the triangulation data struc- -C ture is invalid. -C -C AL = Arc length (angular distance in radians) be- -C tween P and NEARND unless NEARND = 0. -C -C Note that the number of candidates for NEARND -C (neighbors of P) is limited to LMAX defined in -C the PARAMETER statement below. -C -C Modules required by NEARND: JRAND, LSTPTR, TRFIND, STORE -C -C Intrinsic functions called by NEARND: ABS, ACOS -C -C*********************************************************** -C - INTEGER LMAX - PARAMETER (LMAX=25) - INTEGER I1, I2, I3, L, LISTP(LMAX), LP, LP1, LP2, - . LPL, LPTRP(LMAX), N1, N2, N3, NN, NR, NST - REAL B1, B2, B3, DS1, DSR, DX1, DX2, DX3, DY1, - . 
DY2, DY3, DZ1, DZ2, DZ3 -C -C Local parameters: -C -C B1,B2,B3 = Unnormalized barycentric coordinates returned -C by TRFIND -C DS1 = (Negative cosine of the) distance from P to N1 -C DSR = (Negative cosine of the) distance from P to NR -C DX1,..DZ3 = Components of vectors used by the swap test -C I1,I2,I3 = Nodal indexes of a triangle containing P, or -C the rightmost (I1) and leftmost (I2) visible -C boundary nodes as viewed from P -C L = Length of LISTP/LPTRP and number of neighbors -C of P -C LMAX = Maximum value of L -C LISTP = Indexes of the neighbors of P -C LPTRP = Array of pointers in 1-1 correspondence with -C LISTP elements -C LP = LIST pointer to a neighbor of N1 and LISTP -C pointer -C LP1,LP2 = LISTP indexes (pointers) -C LPL = Pointer to the last neighbor of N1 -C N1 = Index of a node visible from P -C N2 = Index of an endpoint of an arc opposite P -C N3 = Index of the node opposite N1->N2 -C NN = Local copy of N -C NR = Index of a candidate for the nearest node to P -C NST = Index of the node at which TRFIND begins the -C search -C -C -C Store local parameters and test for N invalid. -C - NN = N - IF (NN .LT. 3) GO TO 6 - NST = IST - IF (NST .LT. 1 .OR. NST .GT. NN) NST = 1 -C -C Find a triangle (I1,I2,I3) containing P, or the rightmost -C (I1) and leftmost (I2) visible boundary nodes as viewed -C from P. -C - CALL TRFIND (NST,P,N,X,Y,Z,LIST,LPTR,LEND, B1,B2,B3, - . I1,I2,I3) -C -C Test for collinear nodes. -C - IF (I1 .EQ. 0) GO TO 6 -C -C Store the linked list of 'neighbors' of P in LISTP and -C LPTRP. I1 is the first neighbor, and 0 is stored as -C the last neighbor if P is not contained in a triangle. -C L is the length of LISTP and LPTRP, and is limited to -C LMAX. -C - IF (I3 .NE. 0) THEN - LISTP(1) = I1 - LPTRP(1) = 2 - LISTP(2) = I2 - LPTRP(2) = 3 - LISTP(3) = I3 - LPTRP(3) = 1 - L = 3 - ELSE - N1 = I1 - L = 1 - LP1 = 2 - LISTP(L) = N1 - LPTRP(L) = LP1 -C -C Loop on the ordered sequence of visible boundary nodes -C N1 from I1 to I2. -C - 1 LPL = LEND(N1) - N1 = -LIST(LPL) - L = LP1 - LP1 = L+1 - LISTP(L) = N1 - LPTRP(L) = LP1 - IF (N1 .NE. I2 .AND. LP1 .LT. LMAX) GO TO 1 - L = LP1 - LISTP(L) = 0 - LPTRP(L) = 1 - ENDIF -C -C Initialize variables for a loop on arcs N1-N2 opposite P -C in which new 'neighbors' are 'swapped' in. N1 follows -C N2 as a neighbor of P, and LP1 and LP2 are the LISTP -C indexes of N1 and N2. -C - LP2 = 1 - N2 = I1 - LP1 = LPTRP(1) - N1 = LISTP(LP1) -C -C Begin loop: find the node N3 opposite N1->N2. -C - 2 LP = LSTPTR(LEND(N1),N2,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 3 - LP = LPTR(LP) - N3 = ABS(LIST(LP)) -C -C Swap test: Exit the loop if L = LMAX. -C - IF (L .EQ. LMAX) GO TO 4 - DX1 = X(N1) - P(1) - DY1 = Y(N1) - P(2) - DZ1 = Z(N1) - P(3) -C - DX2 = X(N2) - P(1) - DY2 = Y(N2) - P(2) - DZ2 = Z(N2) - P(3) -C - DX3 = X(N3) - P(1) - DY3 = Y(N3) - P(2) - DZ3 = Z(N3) - P(3) - IF ( DX3*(DY2*DZ1 - DY1*DZ2) - - . DY3*(DX2*DZ1 - DX1*DZ2) + - . DZ3*(DX2*DY1 - DX1*DY2) .LE. 0. ) GO TO 3 -C -C Swap: Insert N3 following N2 in the adjacency list for P. -C The two new arcs opposite P must be tested. -C - L = L+1 - LPTRP(LP2) = L - LISTP(L) = N3 - LPTRP(L) = LP1 - LP1 = L - N1 = N3 - GO TO 2 -C -C No swap: Advance to the next arc and test for termination -C on N1 = I1 (LP1 = 1) or N1 followed by 0. -C - 3 IF (LP1 .EQ. 1) GO TO 4 - LP2 = LP1 - N2 = N1 - LP1 = LPTRP(LP1) - N1 = LISTP(LP1) - IF (N1 .EQ. 
0) GO TO 4 - GO TO 2 -C -C Set NR and DSR to the index of the nearest node to P and -C an increasing function (negative cosine) of its distance -C from P, respectively. -C - 4 NR = I1 - DSR = -(X(NR)*P(1) + Y(NR)*P(2) + Z(NR)*P(3)) - DO 5 LP = 2,L - N1 = LISTP(LP) - IF (N1 .EQ. 0) GO TO 5 - DS1 = -(X(N1)*P(1) + Y(N1)*P(2) + Z(N1)*P(3)) - IF (DS1 .LT. DSR) THEN - NR = N1 - DSR = DS1 - ENDIF - 5 CONTINUE - DSR = -DSR - IF (DSR .GT. 1.0) DSR = 1.0 - AL = ACOS(DSR) - NEARND = NR - RETURN -C -C Invalid input. -C - 6 NEARND = 0 - RETURN - END FUNCTION - SUBROUTINE OPTIM (X,Y,Z,NA, LIST,LPTR,LEND,NIT, - . IWK, IER) - INTEGER NA, LIST(*), LPTR(*), LEND(*), NIT, IWK(2,NA), - . IER - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/30/98 -C -C Given a set of NA triangulation arcs, this subroutine -C optimizes the portion of the triangulation consisting of -C the quadrilaterals (pairs of adjacent triangles) which -C have the arcs as diagonals by applying the circumcircle -C test and appropriate swaps to the arcs. -C -C An iteration consists of applying the swap test and -C swaps to all NA arcs in the order in which they are -C stored. The iteration is repeated until no swap occurs -C or NIT iterations have been performed. The bound on the -C number of iterations may be necessary to prevent an -C infinite loop caused by cycling (reversing the effect of a -C previous swap) due to floating point inaccuracy when four -C or more nodes are nearly cocircular. -C -C -C On input: -C -C X,Y,Z = Arrays containing the nodal coordinates. -C -C NA = Number of arcs in the set. NA .GE. 0. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C NIT = Maximum number of iterations to be performed. -C NIT = 4*NA should be sufficient. NIT .GE. 1. -C -C IWK = Integer array dimensioned 2 by NA containing -C the nodal indexes of the arc endpoints (pairs -C of endpoints are stored in columns). -C -C On output: -C -C LIST,LPTR,LEND = Updated triangulation data struc- -C ture reflecting the swaps. -C -C NIT = Number of iterations performed. -C -C IWK = Endpoint indexes of the new set of arcs -C reflecting the swaps. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if a swap occurred on the last of -C MAXIT iterations, where MAXIT is the -C value of NIT on input. The new set -C of arcs is not necessarily optimal -C in this case. -C IER = 2 if NA < 0 or NIT < 1 on input. -C IER = 3 if IWK(2,I) is not a neighbor of -C IWK(1,I) for some I in the range 1 -C to NA. A swap may have occurred in -C this case. -C IER = 4 if a zero pointer was returned by -C Subroutine SWAP. -C -C Modules required by OPTIM: LSTPTR, SWAP, SWPTST -C -C Intrinsic function called by OPTIM: ABS -C -C*********************************************************** -C - INTEGER I, IO1, IO2, ITER, LP, LP21, LPL, LPP, MAXIT, - . 
N1, N2, NNA - LOGICAL SWP -C -C Local parameters: -C -C I = Column index for IWK -C IO1,IO2 = Nodal indexes of the endpoints of an arc in IWK -C ITER = Iteration count -C LP = LIST pointer -C LP21 = Parameter returned by SWAP (not used) -C LPL = Pointer to the last neighbor of IO1 -C LPP = Pointer to the node preceding IO2 as a neighbor -C of IO1 -C MAXIT = Input value of NIT -C N1,N2 = Nodes opposite IO1->IO2 and IO2->IO1, -C respectively -C NNA = Local copy of NA -C SWP = Flag set to TRUE iff a swap occurs in the -C optimization loop -C - NNA = NA - MAXIT = NIT - IF (NNA .LT. 0 .OR. MAXIT .LT. 1) GO TO 7 -C -C Initialize iteration count ITER and test for NA = 0. -C - ITER = 0 - IF (NNA .EQ. 0) GO TO 5 -C -C Top of loop -- -C SWP = TRUE iff a swap occurred in the current iteration. -C - 1 IF (ITER .EQ. MAXIT) GO TO 6 - ITER = ITER + 1 - SWP = .FALSE. -C -C Inner loop on arcs IO1-IO2 -- -C - DO 4 I = 1,NNA - IO1 = IWK(1,I) - IO2 = IWK(2,I) -C -C Set N1 and N2 to the nodes opposite IO1->IO2 and -C IO2->IO1, respectively. Determine the following: -C -C LPL = pointer to the last neighbor of IO1, -C LP = pointer to IO2 as a neighbor of IO1, and -C LPP = pointer to the node N2 preceding IO2. -C - LPL = LEND(IO1) - LPP = LPL - LP = LPTR(LPP) - 2 IF (LIST(LP) .EQ. IO2) GO TO 3 - LPP = LP - LP = LPTR(LPP) - IF (LP .NE. LPL) GO TO 2 -C -C IO2 should be the last neighbor of IO1. Test for no -C arc and bypass the swap test if IO1 is a boundary -C node. -C - IF (ABS(LIST(LP)) .NE. IO2) GO TO 8 - IF (LIST(LP) .LT. 0) GO TO 4 -C -C Store N1 and N2, or bypass the swap test if IO1 is a -C boundary node and IO2 is its first neighbor. -C - 3 N2 = LIST(LPP) - IF (N2 .LT. 0) GO TO 4 - LP = LPTR(LP) - N1 = ABS(LIST(LP)) -C -C Test IO1-IO2 for a swap, and update IWK if necessary. -C - IF ( .NOT. SWPTST(N1,N2,IO1,IO2,X,Y,Z) ) GO TO 4 - CALL SWAP (N1,N2,IO1,IO2, LIST,LPTR,LEND, LP21) - IF (LP21 .EQ. 0) GO TO 9 - SWP = .TRUE. - IWK(1,I) = N1 - IWK(2,I) = N2 - 4 CONTINUE - IF (SWP) GO TO 1 -C -C Successful termination. -C - 5 NIT = ITER - IER = 0 - RETURN -C -C MAXIT iterations performed without convergence. -C - 6 NIT = MAXIT - IER = 1 - RETURN -C -C Invalid input parameter. -C - 7 NIT = 0 - IER = 2 - RETURN -C -C IO2 is not a neighbor of IO1. -C - 8 NIT = ITER - IER = 3 - RETURN -C -C Zero pointer returned by SWAP. -C - 9 NIT = ITER - IER = 4 - RETURN - END SUBROUTINE - SUBROUTINE SCOORD (PX,PY,PZ, PLAT,PLON,PNRM) - REAL PX, PY, PZ, PLAT, PLON, PNRM -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 08/27/90 -C -C This subroutine converts a point P from Cartesian coor- -C dinates to spherical coordinates. -C -C -C On input: -C -C PX,PY,PZ = Cartesian coordinates of P. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C PLAT = Latitude of P in the range -PI/2 to PI/2, or -C 0 if PNRM = 0. PLAT should be scaled by -C 180/PI to obtain the value in degrees. -C -C PLON = Longitude of P in the range -PI to PI, or 0 -C if P lies on the Z-axis. PLON should be -C scaled by 180/PI to obtain the value in -C degrees. -C -C PNRM = Magnitude (Euclidean norm) of P. -C -C Modules required by SCOORD: None -C -C Intrinsic functions called by SCOORD: ASIN, ATAN2, SQRT -C -C*********************************************************** -C - PNRM = SQRT(PX*PX + PY*PY + PZ*PZ) - IF (PX .NE. 0. .OR. PY .NE. 0.) THEN - PLON = ATAN2(PY,PX) - ELSE - PLON = 0. - ENDIF - IF (PNRM .NE. 0.) 
THEN - PLAT = ASIN(PZ/PNRM) - ELSE - PLAT = 0. - ENDIF - RETURN - END SUBROUTINE - REAL FUNCTION STORE (X) - REAL X -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 05/09/92 -C -C This function forces its argument X to be stored in a -C memory location, thus providing a means of determining -C floating point number characteristics (such as the machine -C precision) when it is necessary to avoid computation in -C high precision registers. -C -C -C On input: -C -C X = Value to be stored. -C -C X is not altered by this function. -C -C On output: -C -C STORE = Value of X after it has been stored and -C possibly truncated or rounded to the single -C precision word length. -C -C Modules required by STORE: None -C -C*********************************************************** -C - REAL Y - COMMON/STCOM/Y - Y = X - STORE = Y - RETURN - END FUNCTION - SUBROUTINE SWAP (IN1,IN2,IO1,IO2, LIST,LPTR, - . LEND, LP21) - INTEGER IN1, IN2, IO1, IO2, LIST(*), LPTR(*), LEND(*), - . LP21 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 06/22/98 -C -C Given a triangulation of a set of points on the unit -C sphere, this subroutine replaces a diagonal arc in a -C strictly convex quadrilateral (defined by a pair of adja- -C cent triangles) with the other diagonal. Equivalently, a -C pair of adjacent triangles is replaced by another pair -C having the same union. -C -C -C On input: -C -C IN1,IN2,IO1,IO2 = Nodal indexes of the vertices of -C the quadrilateral. IO1-IO2 is re- -C placed by IN1-IN2. (IO1,IO2,IN1) -C and (IO2,IO1,IN2) must be trian- -C gles on input. -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C On output: -C -C LIST,LPTR,LEND = Data structure updated with the -C swap -- triangles (IO1,IO2,IN1) and -C (IO2,IO1,IN2) are replaced by -C (IN1,IN2,IO2) and (IN2,IN1,IO1) -C unless LP21 = 0. -C -C LP21 = Index of IN1 as a neighbor of IN2 after the -C swap is performed unless IN1 and IN2 are -C adjacent on input, in which case LP21 = 0. -C -C Module required by SWAP: LSTPTR -C -C Intrinsic function called by SWAP: ABS -C -C*********************************************************** -C - INTEGER LP, LPH, LPSAV -C -C Local parameters: -C -C LP,LPH,LPSAV = LIST pointers -C -C -C Test for IN1 and IN2 adjacent. -C - LP = LSTPTR(LEND(IN1),IN2,LIST,LPTR) - IF (ABS(LIST(LP)) .EQ. IN2) THEN - LP21 = 0 - RETURN - ENDIF -C -C Delete IO2 as a neighbor of IO1. -C - LP = LSTPTR(LEND(IO1),IN2,LIST,LPTR) - LPH = LPTR(LP) - LPTR(LP) = LPTR(LPH) -C -C If IO2 is the last neighbor of IO1, make IN2 the -C last neighbor. -C - IF (LEND(IO1) .EQ. LPH) LEND(IO1) = LP -C -C Insert IN2 as a neighbor of IN1 following IO1 -C using the hole created above. -C - LP = LSTPTR(LEND(IN1),IO1,LIST,LPTR) - LPSAV = LPTR(LP) - LPTR(LP) = LPH - LIST(LPH) = IN2 - LPTR(LPH) = LPSAV -C -C Delete IO1 as a neighbor of IO2. -C - LP = LSTPTR(LEND(IO2),IN1,LIST,LPTR) - LPH = LPTR(LP) - LPTR(LP) = LPTR(LPH) -C -C If IO1 is the last neighbor of IO2, make IN1 the -C last neighbor. -C - IF (LEND(IO2) .EQ. LPH) LEND(IO2) = LP -C -C Insert IN1 as a neighbor of IN2 following IO2. 
-C - LP = LSTPTR(LEND(IN2),IO2,LIST,LPTR) - LPSAV = LPTR(LP) - LPTR(LP) = LPH - LIST(LPH) = IN1 - LPTR(LPH) = LPSAV - LP21 = LPH - RETURN - END SUBROUTINE - LOGICAL FUNCTION SWPTST (N1,N2,N3,N4,X,Y,Z) - INTEGER N1, N2, N3, N4 - REAL X(*), Y(*), Z(*) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 03/29/91 -C -C This function decides whether or not to replace a -C diagonal arc in a quadrilateral with the other diagonal. -C The decision will be to swap (SWPTST = TRUE) if and only -C if N4 lies above the plane (in the half-space not contain- -C ing the origin) defined by (N1,N2,N3), or equivalently, if -C the projection of N4 onto this plane is interior to the -C circumcircle of (N1,N2,N3). The decision will be for no -C swap if the quadrilateral is not strictly convex. -C -C -C On input: -C -C N1,N2,N3,N4 = Indexes of the four nodes defining the -C quadrilateral with N1 adjacent to N2, -C and (N1,N2,N3) in counterclockwise -C order. The arc connecting N1 to N2 -C should be replaced by an arc connec- -C ting N3 to N4 if SWPTST = TRUE. Refer -C to Subroutine SWAP. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes. (X(I),Y(I),Z(I)) -C define node I for I = N1, N2, N3, and N4. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C SWPTST = TRUE if and only if the arc connecting N1 -C and N2 should be swapped for an arc con- -C necting N3 and N4. -C -C Modules required by SWPTST: None -C -C*********************************************************** -C - REAL DX1, DX2, DX3, DY1, DY2, DY3, DZ1, DZ2, DZ3, - . X4, Y4, Z4 -C -C Local parameters: -C -C DX1,DY1,DZ1 = Coordinates of N4->N1 -C DX2,DY2,DZ2 = Coordinates of N4->N2 -C DX3,DY3,DZ3 = Coordinates of N4->N3 -C X4,Y4,Z4 = Coordinates of N4 -C - X4 = X(N4) - Y4 = Y(N4) - Z4 = Z(N4) - DX1 = X(N1) - X4 - DX2 = X(N2) - X4 - DX3 = X(N3) - X4 - DY1 = Y(N1) - Y4 - DY2 = Y(N2) - Y4 - DY3 = Y(N3) - Y4 - DZ1 = Z(N1) - Z4 - DZ2 = Z(N2) - Z4 - DZ3 = Z(N3) - Z4 -C -C N4 lies above the plane of (N1,N2,N3) iff N3 lies above -C the plane of (N2,N1,N4) iff Det(N3-N4,N2-N4,N1-N4) = -C (N3-N4,N2-N4 X N1-N4) > 0. -C - SWPTST = DX3*(DY2*DZ1 - DY1*DZ2) - . -DY3*(DX2*DZ1 - DX1*DZ2) - . +DZ3*(DX2*DY1 - DX1*DY2) .GT. 0. - RETURN - END FUNCTION - SUBROUTINE TRANS (N,RLAT,RLON, X,Y,Z) - INTEGER N - REAL RLAT(N), RLON(N), X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 04/08/90 -C -C This subroutine transforms spherical coordinates into -C Cartesian coordinates on the unit sphere for input to -C Subroutine TRMESH. Storage for X and Y may coincide with -C storage for RLAT and RLON if the latter need not be saved. -C -C -C On input: -C -C N = Number of nodes (points on the unit sphere) -C whose coordinates are to be transformed. -C -C RLAT = Array of length N containing latitudinal -C coordinates of the nodes in radians. -C -C RLON = Array of length N containing longitudinal -C coordinates of the nodes in radians. -C -C The above parameters are not altered by this routine. -C -C X,Y,Z = Arrays of length at least N. -C -C On output: -C -C X,Y,Z = Cartesian coordinates in the range -1 to 1. -C X(I)**2 + Y(I)**2 + Z(I)**2 = 1 for I = 1 -C to N. 
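The transformation documented above for TRANS is the standard latitude/longitude to unit-vector mapping. The following Python sketch is an illustrative equivalent of the Fortran loop (hypothetical function name, radians in, unit vectors out); it is not part of the STRIPACK source.

    import math

    # x = cos(lat)*cos(lon), y = cos(lat)*sin(lon), z = sin(lat), so that
    # x**2 + y**2 + z**2 = 1 for every node, matching the TRANS output above.
    def latlon_to_xyz(rlat, rlon):
        x, y, z = [], [], []
        for phi, theta in zip(rlat, rlon):
            cosphi = math.cos(phi)
            x.append(cosphi * math.cos(theta))
            y.append(cosphi * math.sin(theta))
            z.append(math.sin(phi))
        return x, y, z

    # Example: latitude 0, longitude pi/2 maps to (0, 1, 0).
    x, y, z = latlon_to_xyz([0.0], [math.pi / 2])
    assert abs(x[0]) < 1e-12 and abs(y[0] - 1.0) < 1e-12 and abs(z[0]) < 1e-12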
-C -C Modules required by TRANS: None -C -C Intrinsic functions called by TRANS: COS, SIN -C -C*********************************************************** -C - INTEGER I, NN - REAL COSPHI, PHI, THETA -C -C Local parameters: -C -C COSPHI = cos(PHI) -C I = DO-loop index -C NN = Local copy of N -C PHI = Latitude -C THETA = Longitude -C - NN = N - DO 1 I = 1,NN - PHI = RLAT(I) - THETA = RLON(I) - COSPHI = COS(PHI) - X(I) = COSPHI*COS(THETA) - Y(I) = COSPHI*SIN(THETA) - Z(I) = SIN(PHI) - 1 CONTINUE - RETURN - END SUBROUTINE - SUBROUTINE TRFIND (NST,P,N,X,Y,Z,LIST,LPTR,LEND, B1, - . B2,B3,I1,I2,I3) - INTEGER NST, N, LIST(*), LPTR(*), LEND(N), I1, I2, I3 - REAL P(3), X(N), Y(N), Z(N), B1, B2, B3 -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 11/30/99 -C -C This subroutine locates a point P relative to a triangu- -C lation created by Subroutine TRMESH. If P is contained in -C a triangle, the three vertex indexes and barycentric coor- -C dinates are returned. Otherwise, the indexes of the -C visible boundary nodes are returned. -C -C -C On input: -C -C NST = Index of a node at which TRFIND begins its -C search. Search time depends on the proximity -C of this node to P. -C -C P = Array of length 3 containing the x, y, and z -C coordinates (in that order) of the point P to be -C located. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the triangulation nodes (unit -C vectors). (X(I),Y(I),Z(I)) defines node I -C for I = 1 to N. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C B1,B2,B3 = Unnormalized barycentric coordinates of -C the central projection of P onto the un- -C derlying planar triangle if P is in the -C convex hull of the nodes. These parame- -C ters are not altered if I1 = 0. -C -C I1,I2,I3 = Counterclockwise-ordered vertex indexes -C of a triangle containing P if P is con- -C tained in a triangle. If P is not in the -C convex hull of the nodes, I1 and I2 are -C the rightmost and leftmost (boundary) -C nodes that are visible from P, and -C I3 = 0. (If all boundary nodes are vis- -C ible from P, then I1 and I2 coincide.) -C I1 = I2 = I3 = 0 if P and all of the -C nodes are coplanar (lie on a common great -C circle. -C -C Modules required by TRFIND: JRAND, LSTPTR, STORE -C -C Intrinsic function called by TRFIND: ABS -C -C*********************************************************** -C - INTEGER IX, IY, IZ, LP, N0, N1, N1S, N2, N2S, N3, N4, - . NEXT, NF, NL - REAL DET, EPS, PTN1, PTN2, Q(3), S12, TOL, XP, YP, - . 
ZP - REAL X0, X1, X2, Y0, Y1, Y2, Z0, Z1, Z2 -C - SAVE IX, IY, IZ - DATA IX/1/, IY/2/, IZ/3/ -C -C Local parameters: -C -C EPS = Machine precision -C IX,IY,IZ = Integer seeds for JRAND -C LP = LIST pointer -C N0,N1,N2 = Nodes in counterclockwise order defining a -C cone (with vertex N0) containing P, or end- -C points of a boundary edge such that P Right -C N1->N2 -C N1S,N2S = Initially-determined values of N1 and N2 -C N3,N4 = Nodes opposite N1->N2 and N2->N1, respectively -C NEXT = Candidate for I1 or I2 when P is exterior -C NF,NL = First and last neighbors of N0, or first -C (rightmost) and last (leftmost) nodes -C visible from P when P is exterior to the -C triangulation -C PTN1 = Scalar product -C PTN2 = Scalar product -C Q = (N2 X N1) X N2 or N1 X (N2 X N1) -- used in -C the boundary traversal when P is exterior -C S12 = Scalar product -C TOL = Tolerance (multiple of EPS) defining an upper -C bound on the magnitude of a negative bary- -C centric coordinate (B1 or B2) for P in a -C triangle -- used to avoid an infinite number -C of restarts with 0 <= B3 < EPS and B1 < 0 or -C B2 < 0 but small in magnitude -C XP,YP,ZP = Local variables containing P(1), P(2), and P(3) -C X0,Y0,Z0 = Dummy arguments for DET -C X1,Y1,Z1 = Dummy arguments for DET -C X2,Y2,Z2 = Dummy arguments for DET -C -C Statement function: -C -C DET(X1,...,Z0) .GE. 0 if and only if (X0,Y0,Z0) is in the -C (closed) left hemisphere defined by -C the plane containing (0,0,0), -C (X1,Y1,Z1), and (X2,Y2,Z2), where -C left is defined relative to an ob- -C server at (X1,Y1,Z1) facing -C (X2,Y2,Z2). -C - DET (X1,Y1,Z1,X2,Y2,Z2,X0,Y0,Z0) = X0*(Y1*Z2-Y2*Z1) - . - Y0*(X1*Z2-X2*Z1) + Z0*(X1*Y2-X2*Y1) -C -C Initialize variables. -C - XP = P(1) - YP = P(2) - ZP = P(3) - N0 = NST - IF (N0 .LT. 1 .OR. N0 .GT. N) - . N0 = JRAND(N, IX,IY,IZ ) -C -C Compute the relative machine precision EPS and TOL. -C - EPS = 1.E0 - 1 EPS = EPS/2.E0 - IF (STORE(EPS+1.E0) .GT. 1.E0) GO TO 1 - EPS = 2.E0*EPS - TOL = 100.E0*EPS -C -C Set NF and NL to the first and last neighbors of N0, and -C initialize N1 = NF. -C - 2 LP = LEND(N0) - NL = LIST(LP) - LP = LPTR(LP) - NF = LIST(LP) - N1 = NF -C -C Find a pair of adjacent neighbors N1,N2 of N0 that define -C a wedge containing P: P LEFT N0->N1 and P RIGHT N0->N2. -C - IF (NL .GT. 0) THEN -C -C N0 is an interior node. Find N1. -C - 3 IF ( DET(X(N0),Y(N0),Z(N0),X(N1),Y(N1),Z(N1), - . XP,YP,ZP) .LT. 0. ) THEN - LP = LPTR(LP) - N1 = LIST(LP) - IF (N1 .EQ. NL) GO TO 6 - GO TO 3 - ENDIF - ELSE -C -C N0 is a boundary node. Test for P exterior. -C - NL = -NL - IF ( DET(X(N0),Y(N0),Z(N0),X(NF),Y(NF),Z(NF), - . XP,YP,ZP) .LT. 0. ) THEN -C -C P is to the right of the boundary edge N0->NF. -C - N1 = N0 - N2 = NF - GO TO 9 - ENDIF - IF ( DET(X(NL),Y(NL),Z(NL),X(N0),Y(N0),Z(N0), - . XP,YP,ZP) .LT. 0. ) THEN -C -C P is to the right of the boundary edge NL->N0. -C - N1 = NL - N2 = N0 - GO TO 9 - ENDIF - ENDIF -C -C P is to the left of arcs N0->N1 and NL->N0. Set N2 to the -C next neighbor of N0 (following N1). -C - 4 LP = LPTR(LP) - N2 = ABS(LIST(LP)) - IF ( DET(X(N0),Y(N0),Z(N0),X(N2),Y(N2),Z(N2), - . XP,YP,ZP) .LT. 0. ) GO TO 7 - N1 = N2 - IF (N1 .NE. NL) GO TO 4 - IF ( DET(X(N0),Y(N0),Z(N0),X(NF),Y(NF),Z(NF), - . XP,YP,ZP) .LT. 0. ) GO TO 6 -C -C P is left of or on arcs N0->NB for all neighbors NB -C of N0. Test for P = +/-N0. -C - IF (STORE(ABS(X(N0)*XP + Y(N0)*YP + Z(N0)*ZP)) - . .LT. 1.0-4.0*EPS) THEN -C -C All points are collinear iff P Left NB->N0 for all -C neighbors NB of N0. Search the neighbors of N0. 
-C Note: N1 = NL and LP points to NL. -C - 5 IF ( DET(X(N1),Y(N1),Z(N1),X(N0),Y(N0),Z(N0), - . XP,YP,ZP) .GE. 0. ) THEN - LP = LPTR(LP) - N1 = ABS(LIST(LP)) - IF (N1 .EQ. NL) GO TO 14 - GO TO 5 - ENDIF - ENDIF -C -C P is to the right of N1->N0, or P = +/-N0. Set N0 to N1 -C and start over. -C - N0 = N1 - GO TO 2 -C -C P is between arcs N0->N1 and N0->NF. -C - 6 N2 = NF -C -C P is contained in a wedge defined by geodesics N0-N1 and -C N0-N2, where N1 is adjacent to N2. Save N1 and N2 to -C test for cycling. -C - 7 N3 = N0 - N1S = N1 - N2S = N2 -C -C Top of edge-hopping loop: -C - 8 B3 = DET(X(N1),Y(N1),Z(N1),X(N2),Y(N2),Z(N2),XP,YP,ZP) - IF (B3 .LT. 0.) THEN -C -C Set N4 to the first neighbor of N2 following N1 (the -C node opposite N2->N1) unless N1->N2 is a boundary arc. -C - LP = LSTPTR(LEND(N2),N1,LIST,LPTR) - IF (LIST(LP) .LT. 0) GO TO 9 - LP = LPTR(LP) - N4 = ABS(LIST(LP)) -C -C Define a new arc N1->N2 which intersects the geodesic -C N0-P. -C - IF ( DET(X(N0),Y(N0),Z(N0),X(N4),Y(N4),Z(N4), - . XP,YP,ZP) .LT. 0. ) THEN - N3 = N2 - N2 = N4 - N1S = N1 - IF (N2 .NE. N2S .AND. N2 .NE. N0) GO TO 8 - ELSE - N3 = N1 - N1 = N4 - N2S = N2 - IF (N1 .NE. N1S .AND. N1 .NE. N0) GO TO 8 - ENDIF -C -C The starting node N0 or edge N1-N2 was encountered -C again, implying a cycle (infinite loop). Restart -C with N0 randomly selected. -C - N0 = JRAND(N, IX,IY,IZ ) - GO TO 2 - ENDIF -C -C P is in (N1,N2,N3) unless N0, N1, N2, and P are collinear -C or P is close to -N0. -C - IF (B3 .GE. EPS) THEN -C -C B3 .NE. 0. -C - B1 = DET(X(N2),Y(N2),Z(N2),X(N3),Y(N3),Z(N3), - . XP,YP,ZP) - B2 = DET(X(N3),Y(N3),Z(N3),X(N1),Y(N1),Z(N1), - . XP,YP,ZP) - IF (B1 .LT. -TOL .OR. B2 .LT. -TOL) THEN -C -C Restart with N0 randomly selected. -C - N0 = JRAND(N, IX,IY,IZ ) - GO TO 2 - ENDIF - ELSE -C -C B3 = 0 and thus P lies on N1->N2. Compute -C B1 = Det(P,N2 X N1,N2) and B2 = Det(P,N1,N2 X N1). -C - B3 = 0. - S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2) - PTN1 = XP*X(N1) + YP*Y(N1) + ZP*Z(N1) - PTN2 = XP*X(N2) + YP*Y(N2) + ZP*Z(N2) - B1 = PTN1 - S12*PTN2 - B2 = PTN2 - S12*PTN1 - IF (B1 .LT. -TOL .OR. B2 .LT. -TOL) THEN -C -C Restart with N0 randomly selected. -C - N0 = JRAND(N, IX,IY,IZ ) - GO TO 2 - ENDIF - ENDIF -C -C P is in (N1,N2,N3). -C - I1 = N1 - I2 = N2 - I3 = N3 - IF (B1 .LT. 0.0) B1 = 0.0 - IF (B2 .LT. 0.0) B2 = 0.0 - RETURN -C -C P Right N1->N2, where N1->N2 is a boundary edge. -C Save N1 and N2, and set NL = 0 to indicate that -C NL has not yet been found. -C - 9 N1S = N1 - N2S = N2 - NL = 0 -C -C Counterclockwise Boundary Traversal: -C - 10 LP = LEND(N2) - LP = LPTR(LP) - NEXT = LIST(LP) - IF ( DET(X(N2),Y(N2),Z(N2),X(NEXT),Y(NEXT),Z(NEXT), - . XP,YP,ZP) .GE. 0. ) THEN -C -C N2 is the rightmost visible node if P Forward N2->N1 -C or NEXT Forward N2->N1. Set Q to (N2 X N1) X N2. -C - S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2) - Q(1) = X(N1) - S12*X(N2) - Q(2) = Y(N1) - S12*Y(N2) - Q(3) = Z(N1) - S12*Z(N2) - IF (XP*Q(1) + YP*Q(2) + ZP*Q(3) .GE. 0.) GO TO 11 - IF (X(NEXT)*Q(1) + Y(NEXT)*Q(2) + Z(NEXT)*Q(3) - . .GE. 0.) GO TO 11 -C -C N1, N2, NEXT, and P are nearly collinear, and N2 is -C the leftmost visible node. -C - NL = N2 - ENDIF -C -C Bottom of counterclockwise loop: -C - N1 = N2 - N2 = NEXT - IF (N2 .NE. N1S) GO TO 10 -C -C All boundary nodes are visible from P. -C - I1 = N1S - I2 = N1S - I3 = 0 - RETURN -C -C N2 is the rightmost visible node. -C - 11 NF = N2 - IF (NL .EQ. 0) THEN -C -C Restore initial values of N1 and N2, and begin the search -C for the leftmost visible node. 
-C - N2 = N2S - N1 = N1S -C -C Clockwise Boundary Traversal: -C - 12 LP = LEND(N1) - NEXT = -LIST(LP) - IF ( DET(X(NEXT),Y(NEXT),Z(NEXT),X(N1),Y(N1),Z(N1), - . XP,YP,ZP) .GE. 0. ) THEN -C -C N1 is the leftmost visible node if P or NEXT is -C forward of N1->N2. Compute Q = N1 X (N2 X N1). -C - S12 = X(N1)*X(N2) + Y(N1)*Y(N2) + Z(N1)*Z(N2) - Q(1) = X(N2) - S12*X(N1) - Q(2) = Y(N2) - S12*Y(N1) - Q(3) = Z(N2) - S12*Z(N1) - IF (XP*Q(1) + YP*Q(2) + ZP*Q(3) .GE. 0.) GO TO 13 - IF (X(NEXT)*Q(1) + Y(NEXT)*Q(2) + Z(NEXT)*Q(3) - . .GE. 0.) GO TO 13 -C -C P, NEXT, N1, and N2 are nearly collinear and N1 is the -C rightmost visible node. -C - NF = N1 - ENDIF -C -C Bottom of clockwise loop: -C - N2 = N1 - N1 = NEXT - IF (N1 .NE. N1S) GO TO 12 -C -C All boundary nodes are visible from P. -C - I1 = N1 - I2 = N1 - I3 = 0 - RETURN -C -C N1 is the leftmost visible node. -C - 13 NL = N1 - ENDIF -C -C NF and NL have been found. -C - I1 = NF - I2 = NL - I3 = 0 - RETURN -C -C All points are collinear (coplanar). -C - 14 I1 = 0 - I2 = 0 - I3 = 0 - RETURN - END SUBROUTINE - SUBROUTINE TRLIST (N,LIST,LPTR,LEND,NROW, NT,LTRI,IER) - INTEGER N, LIST(*), LPTR(*), LEND(N), NROW, NT, - . LTRI(NROW,*), IER -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/20/96 -C -C This subroutine converts a triangulation data structure -C from the linked list created by Subroutine TRMESH to a -C triangle list. -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C LIST,LPTR,LEND = Linked list data structure defin- -C ing the triangulation. Refer to -C Subroutine TRMESH. -C -C NROW = Number of rows (entries per triangle) re- -C served for the triangle list LTRI. The value -C must be 6 if only the vertex indexes and -C neighboring triangle indexes are to be -C stored, or 9 if arc indexes are also to be -C assigned and stored. Refer to LTRI. -C -C The above parameters are not altered by this routine. -C -C LTRI = Integer array of length at least NROW*NT, -C where NT is at most 2N-4. (A sufficient -C length is 12N if NROW=6 or 18N if NROW=9.) -C -C On output: -C -C NT = Number of triangles in the triangulation unless -C IER .NE. 0, in which case NT = 0. NT = 2N-NB-2 -C if NB .GE. 3 or 2N-4 if NB = 0, where NB is the -C number of boundary nodes. -C -C LTRI = NROW by NT array whose J-th column contains -C the vertex nodal indexes (first three rows), -C neighboring triangle indexes (second three -C rows), and, if NROW = 9, arc indexes (last -C three rows) associated with triangle J for -C J = 1,...,NT. The vertices are ordered -C counterclockwise with the first vertex taken -C to be the one with smallest index. Thus, -C LTRI(2,J) and LTRI(3,J) are larger than -C LTRI(1,J) and index adjacent neighbors of -C node LTRI(1,J). For I = 1,2,3, LTRI(I+3,J) -C and LTRI(I+6,J) index the triangle and arc, -C respectively, which are opposite (not shared -C by) node LTRI(I,J), with LTRI(I+3,J) = 0 if -C LTRI(I+6,J) indexes a boundary arc. Vertex -C indexes range from 1 to N, triangle indexes -C from 0 to NT, and, if included, arc indexes -C from 1 to NA, where NA = 3N-NB-3 if NB .GE. 3 -C or 3N-6 if NB = 0. The triangles are or- -C dered on first (smallest) vertex indexes. -C -C IER = Error indicator. -C IER = 0 if no errors were encountered. -C IER = 1 if N or NROW is outside its valid -C range on input. -C IER = 2 if the triangulation data structure -C (LIST,LPTR,LEND) is invalid. 
Note, -C however, that these arrays are not -C completely tested for validity. -C -C Modules required by TRLIST: None -C -C Intrinsic function called by TRLIST: ABS -C -C*********************************************************** -C - INTEGER I, I1, I2, I3, ISV, J, KA, KN, KT, LP, LP2, - . LPL, LPLN1, N1, N2, N3, NM2 - LOGICAL ARCS -C -C Local parameters: -C -C ARCS = Logical variable with value TRUE iff are -C indexes are to be stored -C I,J = LTRI row indexes (1 to 3) associated with -C triangles KT and KN, respectively -C I1,I2,I3 = Nodal indexes of triangle KN -C ISV = Variable used to permute indexes I1,I2,I3 -C KA = Arc index and number of currently stored arcs -C KN = Index of the triangle that shares arc I1-I2 -C with KT -C KT = Triangle index and number of currently stored -C triangles -C LP = LIST pointer -C LP2 = Pointer to N2 as a neighbor of N1 -C LPL = Pointer to the last neighbor of I1 -C LPLN1 = Pointer to the last neighbor of N1 -C N1,N2,N3 = Nodal indexes of triangle KT -C NM2 = N-2 -C -C -C Test for invalid input parameters. -C - IF (N .LT. 3 .OR. (NROW .NE. 6 .AND. NROW .NE. 9)) - . GO TO 11 -C -C Initialize parameters for loop on triangles KT = (N1,N2, -C N3), where N1 < N2 and N1 < N3. -C -C ARCS = TRUE iff arc indexes are to be stored. -C KA,KT = Numbers of currently stored arcs and triangles. -C NM2 = Upper bound on candidates for N1. -C - ARCS = NROW .EQ. 9 - KA = 0 - KT = 0 - NM2 = N-2 -C -C Loop on nodes N1. -C - DO 9 N1 = 1,NM2 -C -C Loop on pairs of adjacent neighbors (N2,N3). LPLN1 points -C to the last neighbor of N1, and LP2 points to N2. -C - LPLN1 = LEND(N1) - LP2 = LPLN1 - 1 LP2 = LPTR(LP2) - N2 = LIST(LP2) - LP = LPTR(LP2) - N3 = ABS(LIST(LP)) - IF (N2 .LT. N1 .OR. N3 .LT. N1) GO TO 8 -C -C Add a new triangle KT = (N1,N2,N3). -C - KT = KT + 1 - LTRI(1,KT) = N1 - LTRI(2,KT) = N2 - LTRI(3,KT) = N3 -C -C Loop on triangle sides (I2,I1) with neighboring triangles -C KN = (I1,I2,I3). -C - DO 7 I = 1,3 - IF (I .EQ. 1) THEN - I1 = N3 - I2 = N2 - ELSEIF (I .EQ. 2) THEN - I1 = N1 - I2 = N3 - ELSE - I1 = N2 - I2 = N1 - ENDIF -C -C Set I3 to the neighbor of I1 that follows I2 unless -C I2->I1 is a boundary arc. -C - LPL = LEND(I1) - LP = LPTR(LPL) - 2 IF (LIST(LP) .EQ. I2) GO TO 3 - LP = LPTR(LP) - IF (LP .NE. LPL) GO TO 2 -C -C I2 is the last neighbor of I1 unless the data structure -C is invalid. Bypass the search for a neighboring -C triangle if I2->I1 is a boundary arc. -C - IF (ABS(LIST(LP)) .NE. I2) GO TO 12 - KN = 0 - IF (LIST(LP) .LT. 0) GO TO 6 -C -C I2->I1 is not a boundary arc, and LP points to I2 as -C a neighbor of I1. -C - 3 LP = LPTR(LP) - I3 = ABS(LIST(LP)) -C -C Find J such that LTRI(J,KN) = I3 (not used if KN > KT), -C and permute the vertex indexes of KN so that I1 is -C smallest. -C - IF (I1 .LT. I2 .AND. I1 .LT. I3) THEN - J = 3 - ELSEIF (I2 .LT. I3) THEN - J = 2 - ISV = I1 - I1 = I2 - I2 = I3 - I3 = ISV - ELSE - J = 1 - ISV = I1 - I1 = I3 - I3 = I2 - I2 = ISV - ENDIF -C -C Test for KN > KT (triangle index not yet assigned). -C - IF (I1 .GT. N1) GO TO 7 -C -C Find KN, if it exists, by searching the triangle list in -C reverse order. -C - DO 4 KN = KT-1,1,-1 - IF (LTRI(1,KN) .EQ. I1 .AND. LTRI(2,KN) .EQ. - . I2 .AND. LTRI(3,KN) .EQ. I3) GO TO 5 - 4 CONTINUE - GO TO 7 -C -C Store KT as a neighbor of KN. -C - 5 LTRI(J+3,KN) = KT -C -C Store KN as a neighbor of KT, and add a new arc KA. -C - 6 LTRI(I+3,KT) = KN - IF (ARCS) THEN - KA = KA + 1 - LTRI(I+6,KT) = KA - IF (KN .NE. 
0) LTRI(J+6,KN) = KA - ENDIF - 7 CONTINUE -C -C Bottom of loop on triangles. -C - 8 IF (LP2 .NE. LPLN1) GO TO 1 - 9 CONTINUE -C -C No errors encountered. -C - NT = KT - IER = 0 - RETURN -C -C Invalid input parameter. -C - 11 NT = 0 - IER = 1 - RETURN -C -C Invalid triangulation data structure: I1 is a neighbor of -C I2, but I2 is not a neighbor of I1. -C - 12 NT = 0 - IER = 2 - RETURN - END SUBROUTINE - SUBROUTINE TRLPRT (N,X,Y,Z,IFLAG,NROW,NT,LTRI,LOUT) - INTEGER N, IFLAG, NROW, NT, LTRI(NROW,NT), LOUT - REAL X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/02/98 -C -C This subroutine prints the triangle list created by Sub- -C routine TRLIST and, optionally, the nodal coordinates -C (either latitude and longitude or Cartesian coordinates) -C on logical unit LOUT. The numbers of boundary nodes, -C triangles, and arcs are also printed. -C -C -C On input: -C -C N = Number of nodes in the triangulation. -C 3 .LE. N .LE. 9999. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes if IFLAG = 0, or -C (X and Y only) arrays of length N containing -C longitude and latitude, respectively, if -C IFLAG > 0, or unused dummy parameters if -C IFLAG < 0. -C -C IFLAG = Nodal coordinate option indicator: -C IFLAG = 0 if X, Y, and Z (assumed to contain -C Cartesian coordinates) are to be -C printed (to 6 decimal places). -C IFLAG > 0 if only X and Y (assumed to con- -C tain longitude and latitude) are -C to be printed (to 6 decimal -C places). -C IFLAG < 0 if only the adjacency lists are to -C be printed. -C -C NROW = Number of rows (entries per triangle) re- -C served for the triangle list LTRI. The value -C must be 6 if only the vertex indexes and -C neighboring triangle indexes are stored, or 9 -C if arc indexes are also stored. -C -C NT = Number of triangles in the triangulation. -C 1 .LE. NT .LE. 9999. -C -C LTRI = NROW by NT array whose J-th column contains -C the vertex nodal indexes (first three rows), -C neighboring triangle indexes (second three -C rows), and, if NROW = 9, arc indexes (last -C three rows) associated with triangle J for -C J = 1,...,NT. -C -C LOUT = Logical unit number for output. If LOUT is -C not in the range 0 to 99, output is written -C to unit 6. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C The triangle list and nodal coordinates (as specified by -C IFLAG) are written to unit LOUT. -C -C Modules required by TRLPRT: None -C -C*********************************************************** -C - INTEGER I, K, LUN, NA, NB, NL, NLMAX, NMAX - DATA NMAX/9999/, NLMAX/58/ -C -C Local parameters: -C -C I = DO-loop, nodal index, and row index for LTRI -C K = DO-loop and triangle index -C LUN = Logical unit number for output -C NA = Number of triangulation arcs -C NB = Number of boundary nodes -C NL = Number of lines printed on the current page -C NLMAX = Maximum number of print lines per page (except -C for the last page which may have two addi- -C tional lines) -C NMAX = Maximum value of N and NT (4-digit format) -C - LUN = LOUT - IF (LUN .LT. 0 .OR. LUN .GT. 99) LUN = 6 -C -C Print a heading and test for invalid input. -C - WRITE (LUN,100) N - NL = 3 - IF (N .LT. 3 .OR. N .GT. NMAX .OR. - . (NROW .NE. 6 .AND. NROW .NE. 9) .OR. - . NT .LT. 1 .OR. NT .GT. NMAX) THEN -C -C Print an error message and exit. -C - WRITE (LUN,110) N, NROW, NT - RETURN - ENDIF - IF (IFLAG .EQ. 
0) THEN -C -C Print X, Y, and Z. -C - WRITE (LUN,101) - NL = 6 - DO 1 I = 1,N - IF (NL .GE. NLMAX) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - WRITE (LUN,103) I, X(I), Y(I), Z(I) - NL = NL + 1 - 1 CONTINUE - ELSEIF (IFLAG .GT. 0) THEN -C -C Print X (longitude) and Y (latitude). -C - WRITE (LUN,102) - NL = 6 - DO 2 I = 1,N - IF (NL .GE. NLMAX) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - WRITE (LUN,104) I, X(I), Y(I) - NL = NL + 1 - 2 CONTINUE - ENDIF -C -C Print the triangulation LTRI. -C - IF (NL .GT. NLMAX/2) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - IF (NROW .EQ. 6) THEN - WRITE (LUN,105) - ELSE - WRITE (LUN,106) - ENDIF - NL = NL + 5 - DO 3 K = 1,NT - IF (NL .GE. NLMAX) THEN - WRITE (LUN,108) - NL = 0 - ENDIF - WRITE (LUN,107) K, (LTRI(I,K), I = 1,NROW) - NL = NL + 1 - 3 CONTINUE -C -C Print NB, NA, and NT (boundary nodes, arcs, and -C triangles). -C - NB = 2*N - NT - 2 - IF (NB .LT. 3) THEN - NB = 0 - NA = 3*N - 6 - ELSE - NA = NT + N - 1 - ENDIF - WRITE (LUN,109) NB, NA, NT - RETURN -C -C Print formats: -C - 100 FORMAT (///18X,'STRIPACK (TRLIST) Output, N = ',I4) - 101 FORMAT (//8X,'Node',10X,'X(Node)',10X,'Y(Node)',10X, - . 'Z(Node)'//) - 102 FORMAT (//16X,'Node',8X,'Longitude',9X,'Latitude'//) - 103 FORMAT (8X,I4,3E17.6) - 104 FORMAT (16X,I4,2E17.6) - 105 FORMAT (//1X,'Triangle',8X,'Vertices',12X,'Neighbors'/ - . 4X,'KT',7X,'N1',5X,'N2',5X,'N3',4X,'KT1',4X, - . 'KT2',4X,'KT3'/) - 106 FORMAT (//1X,'Triangle',8X,'Vertices',12X,'Neighbors', - . 14X,'Arcs'/ - . 4X,'KT',7X,'N1',5X,'N2',5X,'N3',4X,'KT1',4X, - . 'KT2',4X,'KT3',4X,'KA1',4X,'KA2',4X,'KA3'/) - 107 FORMAT (2X,I4,2X,6(3X,I4),3(2X,I5)) - 108 FORMAT (///) - 109 FORMAT (/1X,'NB = ',I4,' Boundary Nodes',5X, - . 'NA = ',I5,' Arcs',5X,'NT = ',I5, - . ' Triangles') - 110 FORMAT (//1X,10X,'*** Invalid Parameter: N =',I5, - . ', NROW =',I5,', NT =',I5,' ***') - END SUBROUTINE - SUBROUTINE TRMESH (N,X,Y,Z, LIST,LPTR,LEND,LNEW,NEAR, - . NEXT,DIST,IER) - INTEGER N, LIST(*), LPTR(*), LEND(N), LNEW, NEAR(N), - . NEXT(N), IER - REAL X(N), Y(N), Z(N), DIST(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/08/99 -C -C This subroutine creates a Delaunay triangulation of a -C set of N arbitrarily distributed points, referred to as -C nodes, on the surface of the unit sphere. The Delaunay -C triangulation is defined as a set of (spherical) triangles -C with the following five properties: -C -C 1) The triangle vertices are nodes. -C 2) No triangle contains a node other than its vertices. -C 3) The interiors of the triangles are pairwise disjoint. -C 4) The union of triangles is the convex hull of the set -C of nodes (the smallest convex set that contains -C the nodes). If the nodes are not contained in a -C single hemisphere, their convex hull is the en- -C tire sphere and there are no boundary nodes. -C Otherwise, there are at least three boundary nodes. -C 5) The interior of the circumcircle of each triangle -C contains no node. -C -C The first four properties define a triangulation, and the -C last property results in a triangulation which is as close -C as possible to equiangular in a certain sense and which is -C uniquely defined unless four or more nodes lie in a common -C plane. This property makes the triangulation well-suited -C for solving closest-point problems and for triangle-based -C interpolation. 
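Property 5 above, the empty circumcircle, is what makes the triangulation Delaunay. As a minimal illustration (not part of STRIPACK, and assuming NumPy, 0-based node indices, and counterclockwise vertex order), the test can be written as a dot-product comparison, since on the unit sphere a point is angularly closer to the circumcenter exactly when its dot product with the circumcenter is larger:

```python
import numpy as np

def circumcenter(v1, v2, v3):
    # Unit vector equidistant (in arc length) from the three unit vectors.
    # For counterclockwise-ordered vertices this lies on the same side of
    # the plane as the triangle; reversing the order gives the antipode.
    c = np.cross(v2 - v1, v3 - v1)
    return c / np.linalg.norm(c)

def circumcircle_is_empty(nodes, tri, tol=1.0e-12):
    # Property 5: no node lies strictly inside the circumcircle of `tri`.
    # On the unit sphere "closer to the circumcenter" means a larger dot
    # product with it, so cosines are compared rather than angles.
    i1, i2, i3 = tri
    c = circumcenter(nodes[i1], nodes[i2], nodes[i3])
    cos_radius = nodes[i1] @ c
    others = np.delete(np.arange(len(nodes)), [i1, i2, i3])
    return bool(np.all(nodes[others] @ c <= cos_radius + tol))

nodes = np.array([[1.0, 0.0, 0.0],
                  [0.0, 1.0, 0.0],
                  [0.0, 0.0, 1.0],
                  [-1.0, 0.0, 0.0]])
print(circumcircle_is_empty(nodes, (0, 1, 2)))   # True: node 3 is outside
```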
-C -C Provided the nodes are randomly ordered, the algorithm -C has expected time complexity O(N*log(N)) for most nodal -C distributions. Note, however, that the complexity may be -C as high as O(N**2) if, for example, the nodes are ordered -C on increasing latitude. -C -C Spherical coordinates (latitude and longitude) may be -C converted to Cartesian coordinates by Subroutine TRANS. -C -C The following is a list of the software package modules -C which a user may wish to call directly: -C -C ADDNOD - Updates the triangulation by appending a new -C node. -C -C AREAS - Returns the area of a spherical triangle. -C -C BNODES - Returns an array containing the indexes of the -C boundary nodes (if any) in counterclockwise -C order. Counts of boundary nodes, triangles, -C and arcs are also returned. -C -C CIRCUM - Returns the circumcenter of a spherical trian- -C gle. -C -C CRLIST - Returns the set of triangle circumcenters -C (Voronoi vertices) and circumradii associated -C with a triangulation. -C -C DELARC - Deletes a boundary arc from a triangulation. -C -C DELNOD - Updates the triangulation with a nodal deletion. -C -C EDGE - Forces an arbitrary pair of nodes to be connec- -C ted by an arc in the triangulation. -C -C GETNP - Determines the ordered sequence of L closest -C nodes to a given node, along with the associ- -C ated distances. -C -C INSIDE - Locates a point relative to a polygon on the -C surface of the sphere. -C -C INTRSC - Returns the point of intersection between a -C pair of great circle arcs. -C -C JRAND - Generates a uniformly distributed pseudo-random -C integer. -C -C LEFT - Locates a point relative to a great circle. -C -C NEARND - Returns the index of the nearest node to an -C arbitrary point, along with its squared -C distance. -C -C SCOORD - Converts a point from Cartesian coordinates to -C spherical coordinates. -C -C STORE - Forces a value to be stored in main memory so -C that the precision of floating point numbers -C in memory locations rather than registers is -C computed. -C -C TRANS - Transforms spherical coordinates into Cartesian -C coordinates on the unit sphere for input to -C Subroutine TRMESH. -C -C TRLIST - Converts the triangulation data structure to a -C triangle list more suitable for use in a fin- -C ite element code. -C -C TRLPRT - Prints the triangle list created by Subroutine -C TRLIST. -C -C TRMESH - Creates a Delaunay triangulation of a set of -C nodes. -C -C TRPLOT - Creates a level-2 Encapsulated Postscript (EPS) -C file containing a triangulation plot. -C -C TRPRNT - Prints the triangulation data structure and, -C optionally, the nodal coordinates. -C -C VRPLOT - Creates a level-2 Encapsulated Postscript (EPS) -C file containing a Voronoi diagram plot. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of distinct nodes. (X(K),Y(K), -C Z(K)) is referred to as node K, and K is re- -C ferred to as a nodal index. It is required -C that X(K)**2 + Y(K)**2 + Z(K)**2 = 1 for all -C K. The first three nodes must not be col- -C linear (lie on a common great circle). -C -C The above parameters are not altered by this routine. -C -C LIST,LPTR = Arrays of length at least 6N-12. -C -C LEND = Array of length at least N. -C -C NEAR,NEXT,DIST = Work space arrays of length at -C least N. The space is used to -C efficiently determine the nearest -C triangulation node to each un- -C processed node for use by ADDNOD. 
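The input contract above asks for unit vectors (X(K)**2 + Y(K)**2 + Z(K)**2 = 1 for all K), LIST and LPTR of length at least 6N-12, and N-length LEND, NEAR, NEXT, and DIST arrays. A small Python sketch of that preparation, assuming latitude and longitude in radians (the helper names are illustrative; in the Fortran the conversion is the job of Subroutine TRANS):

```python
import numpy as np

def latlon_to_unit_xyz(lat, lon):
    # Convert spherical coordinates (radians assumed here) to Cartesian
    # coordinates on the unit sphere, so x**2 + y**2 + z**2 = 1 for every
    # node, as the X, Y, Z inputs to TRMESH require.
    lat = np.asarray(lat, dtype=float)
    lon = np.asarray(lon, dtype=float)
    x = np.cos(lat) * np.cos(lon)
    y = np.cos(lat) * np.sin(lon)
    z = np.sin(lat)
    return x, y, z

def trmesh_workspace_sizes(n):
    # Minimum array lengths documented above: LIST and LPTR need at least
    # 6N-12 entries; LEND, NEAR, NEXT, and DIST need N entries each.
    return {"LIST": 6 * n - 12, "LPTR": 6 * n - 12,
            "LEND": n, "NEAR": n, "NEXT": n, "DIST": n}

x, y, z = latlon_to_unit_xyz([0.0, 0.0, np.pi / 2], [0.0, np.pi / 2, 0.0])
assert np.allclose(x**2 + y**2 + z**2, 1.0)
print(trmesh_workspace_sizes(100))
```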
-C -C On output: -C -C LIST = Set of nodal indexes which, along with LPTR, -C LEND, and LNEW, define the triangulation as a -C set of N adjacency lists -- counterclockwise- -C ordered sequences of neighboring nodes such -C that the first and last neighbors of a bound- -C ary node are boundary nodes (the first neigh- -C bor of an interior node is arbitrary). In -C order to distinguish between interior and -C boundary nodes, the last neighbor of each -C boundary node is represented by the negative -C of its index. -C -C LPTR = Set of pointers (LIST indexes) in one-to-one -C correspondence with the elements of LIST. -C LIST(LPTR(I)) indexes the node which follows -C LIST(I) in cyclical counterclockwise order -C (the first neighbor follows the last neigh- -C bor). -C -C LEND = Set of pointers to adjacency lists. LEND(K) -C points to the last neighbor of node K for -C K = 1,...,N. Thus, LIST(LEND(K)) < 0 if and -C only if K is a boundary node. -C -C LNEW = Pointer to the first empty location in LIST -C and LPTR (list length plus one). LIST, LPTR, -C LEND, and LNEW are not altered if IER < 0, -C and are incomplete if IER > 0. -C -C NEAR,NEXT,DIST = Garbage. -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = -1 if N < 3 on input. -C IER = -2 if the first three nodes are -C collinear. -C IER = L if nodes L and M coincide for some -C M > L. The data structure represents -C a triangulation of nodes 1 to M-1 in -C this case. -C -C Modules required by TRMESH: ADDNOD, BDYADD, COVSPH, -C INSERT, INTADD, JRAND, -C LEFT, LSTPTR, STORE, SWAP, -C SWPTST, TRFIND -C -C Intrinsic function called by TRMESH: ABS -C -C*********************************************************** -C - INTEGER I, I0, J, K, LP, LPL, NEXTI, NN - REAL D, D1, D2, D3 -C -C Local parameters: -C -C D = (Negative cosine of) distance from node K to -C node I -C D1,D2,D3 = Distances from node K to nodes 1, 2, and 3, -C respectively -C I,J = Nodal indexes -C I0 = Index of the node preceding I in a sequence of -C unprocessed nodes: I = NEXT(I0) -C K = Index of node to be added and DO-loop index: -C K > 3 -C LP = LIST index (pointer) of a neighbor of K -C LPL = Pointer to the last neighbor of K -C NEXTI = NEXT(I) -C NN = Local copy of N -C - NN = N - IF (NN .LT. 3) THEN - IER = -1 - RETURN - ENDIF -C -C Store the first triangle in the linked list. -C - IF ( .NOT. LEFT (X(1),Y(1),Z(1),X(2),Y(2),Z(2), - . X(3),Y(3),Z(3)) ) THEN -C -C The first triangle is (3,2,1) = (2,1,3) = (1,3,2). -C - LIST(1) = 3 - LPTR(1) = 2 - LIST(2) = -2 - LPTR(2) = 1 - LEND(1) = 2 -C - LIST(3) = 1 - LPTR(3) = 4 - LIST(4) = -3 - LPTR(4) = 3 - LEND(2) = 4 -C - LIST(5) = 2 - LPTR(5) = 6 - LIST(6) = -1 - LPTR(6) = 5 - LEND(3) = 6 -C - ELSEIF ( .NOT. LEFT(X(2),Y(2),Z(2),X(1),Y(1),Z(1), - . X(3),Y(3),Z(3)) ) - . THEN -C -C The first triangle is (1,2,3): 3 Strictly Left 1->2, -C i.e., node 3 lies in the left hemisphere defined by -C arc 1->2. -C - LIST(1) = 2 - LPTR(1) = 2 - LIST(2) = -3 - LPTR(2) = 1 - LEND(1) = 2 -C - LIST(3) = 3 - LPTR(3) = 4 - LIST(4) = -1 - LPTR(4) = 3 - LEND(2) = 4 -C - LIST(5) = 1 - LPTR(5) = 6 - LIST(6) = -2 - LPTR(6) = 5 - LEND(3) = 6 -C - ELSE -C -C The first three nodes are collinear. -C - IER = -2 - RETURN - ENDIF -C -C Initialize LNEW and test for N = 3. -C - LNEW = 7 - IF (NN .EQ. 
3) THEN - IER = 0 - RETURN - ENDIF -C -C A nearest-node data structure (NEAR, NEXT, and DIST) is -C used to obtain an expected-time (N*log(N)) incremental -C algorithm by enabling constant search time for locating -C each new node in the triangulation. -C -C For each unprocessed node K, NEAR(K) is the index of the -C triangulation node closest to K (used as the starting -C point for the search in Subroutine TRFIND) and DIST(K) -C is an increasing function of the arc length (angular -C distance) between nodes K and NEAR(K): -Cos(a) for arc -C length a. -C -C Since it is necessary to efficiently find the subset of -C unprocessed nodes associated with each triangulation -C node J (those that have J as their NEAR entries), the -C subsets are stored in NEAR and NEXT as follows: for -C each node J in the triangulation, I = NEAR(J) is the -C first unprocessed node in J's set (with I = 0 if the -C set is empty), L = NEXT(I) (if I > 0) is the second, -C NEXT(L) (if L > 0) is the third, etc. The nodes in each -C set are initially ordered by increasing indexes (which -C maximizes efficiency) but that ordering is not main- -C tained as the data structure is updated. -C -C Initialize the data structure for the single triangle. -C - NEAR(1) = 0 - NEAR(2) = 0 - NEAR(3) = 0 - DO 1 K = NN,4,-1 - D1 = -(X(K)*X(1) + Y(K)*Y(1) + Z(K)*Z(1)) - D2 = -(X(K)*X(2) + Y(K)*Y(2) + Z(K)*Z(2)) - D3 = -(X(K)*X(3) + Y(K)*Y(3) + Z(K)*Z(3)) - IF (D1 .LE. D2 .AND. D1 .LE. D3) THEN - NEAR(K) = 1 - DIST(K) = D1 - NEXT(K) = NEAR(1) - NEAR(1) = K - ELSEIF (D2 .LE. D1 .AND. D2 .LE. D3) THEN - NEAR(K) = 2 - DIST(K) = D2 - NEXT(K) = NEAR(2) - NEAR(2) = K - ELSE - NEAR(K) = 3 - DIST(K) = D3 - NEXT(K) = NEAR(3) - NEAR(3) = K - ENDIF - 1 CONTINUE -C -C Add the remaining nodes -C - DO 6 K = 4,NN - CALL ADDNOD (NEAR(K),K,X,Y,Z, LIST,LPTR,LEND, - . LNEW, IER) - IF (IER .NE. 0) RETURN -C -C Remove K from the set of unprocessed nodes associated -C with NEAR(K). -C - I = NEAR(K) - IF (NEAR(I) .EQ. K) THEN - NEAR(I) = NEXT(K) - ELSE - I = NEAR(I) - 2 I0 = I - I = NEXT(I0) - IF (I .NE. K) GO TO 2 - NEXT(I0) = NEXT(K) - ENDIF - NEAR(K) = 0 -C -C Loop on neighbors J of node K. -C - LPL = LEND(K) - LP = LPL - 3 LP = LPTR(LP) - J = ABS(LIST(LP)) -C -C Loop on elements I in the sequence of unprocessed nodes -C associated with J: K is a candidate for replacing J -C as the nearest triangulation node to I. The next value -C of I in the sequence, NEXT(I), must be saved before I -C is moved because it is altered by adding I to K's set. -C - I = NEAR(J) - 4 IF (I .EQ. 0) GO TO 5 - NEXTI = NEXT(I) -C -C Test for the distance from I to K less than the distance -C from I to J. -C - D = -(X(I)*X(K) + Y(I)*Y(K) + Z(I)*Z(K)) - IF (D .LT. DIST(I)) THEN -C -C Replace J by K as the nearest triangulation node to I: -C update NEAR(I) and DIST(I), and remove I from J's set -C of unprocessed nodes and add it to K's set. -C - NEAR(I) = K - DIST(I) = D - IF (I .EQ. NEAR(J)) THEN - NEAR(J) = NEXTI - ELSE - NEXT(I0) = NEXTI - ENDIF - NEXT(I) = NEAR(K) - NEAR(K) = I - ELSE - I0 = I - ENDIF -C -C Bottom of loop on I. -C - I = NEXTI - GO TO 4 -C -C Bottom of loop on neighbors J. -C - 5 IF (LP .NE. LPL) GO TO 3 - 6 CONTINUE - RETURN - END SUBROUTINE - SUBROUTINE TRPLOT (LUN,PLTSIZ,ELAT,ELON,A,N,X,Y,Z, - . LIST,LPTR,LEND,TITLE,NUMBR, IER) - CHARACTER*(*) TITLE - INTEGER LUN, N, LIST(*), LPTR(*), LEND(N), IER - LOGICAL NUMBR - REAL PLTSIZ, ELAT, ELON, A, X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. 
Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/16/98 -C -C This subroutine creates a level-2 Encapsulated Post- -C script (EPS) file containing a graphical display of a -C triangulation of a set of nodes on the unit sphere. The -C visible nodes are projected onto the plane that contains -C the origin and has normal defined by a user-specified eye- -C position. Projections of adjacent (visible) nodes are -C connected by line segments. -C -C -C On input: -C -C LUN = Logical unit number in the range 0 to 99. -C The unit should be opened with an appropriate -C file name before the call to this routine. -C -C PLTSIZ = Plot size in inches. A circular window in -C the projection plane is mapped to a circu- -C lar viewport with diameter equal to .88* -C PLTSIZ (leaving room for labels outside the -C viewport). The viewport is centered on the -C 8.5 by 11 inch page, and its boundary is -C drawn. 1.0 .LE. PLTSIZ .LE. 8.5. -C -C ELAT,ELON = Latitude and longitude (in degrees) of -C the center of projection E (the center -C of the plot). The projection plane is -C the plane that contains the origin and -C has E as unit normal. In a rotated -C coordinate system for which E is the -C north pole, the projection plane con- -C tains the equator, and only northern -C hemisphere nodes are visible (from the -C point at infinity in the direction E). -C These are projected orthogonally onto -C the projection plane (by zeroing the z- -C component in the rotated coordinate -C system). ELAT and ELON must be in the -C range -90 to 90 and -180 to 180, respec- -C tively. -C -C A = Angular distance in degrees from E to the boun- -C dary of a circular window against which the -C triangulation is clipped. The projected window -C is a disk of radius r = Sin(A) centered at the -C origin, and only visible nodes whose projections -C are within distance r of the origin are included -C in the plot. Thus, if A = 90, the plot includes -C the entire hemisphere centered at E. 0 .LT. A -C .LE. 90. -C -C N = Number of nodes in the triangulation. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C TITLE = Type CHARACTER variable or constant contain- -C ing a string to be centered above the plot. -C The string must be enclosed in parentheses; -C i.e., the first and last characters must be -C '(' and ')', respectively, but these are not -C displayed. TITLE may have at most 80 char- -C acters including the parentheses. -C -C NUMBR = Option indicator: If NUMBR = TRUE, the -C nodal indexes are plotted next to the nodes. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LUN, PLTSIZ, or N is outside its -C valid range. -C IER = 2 if ELAT, ELON, or A is outside its -C valid range. -C IER = 3 if an error was encountered in writing -C to unit LUN. -C -C The values in the data statement below may be altered -C in order to modify various plotting options. -C -C Modules required by TRPLOT: None -C -C Intrinsic functions called by TRPLOT: ABS, ATAN, COS, -C NINT, REAL, SIN, -C SQRT -C -C*********************************************************** -C - INTEGER IPX1, IPX2, IPY1, IPY2, IR, LP, LPL, N0, N1 - LOGICAL ANNOT - REAL CF, CT, EX, EY, EZ, FSIZN, FSIZT, R11, R12, - . R21, R22, R23, SF, T, TX, TY, WR, WRS, X0, X1, - . 
Y0, Y1, Z0, Z1 -C - DATA ANNOT/.TRUE./, FSIZN/10.0/, FSIZT/16.0/ -C -C Local parameters: -C -C ANNOT = Logical variable with value TRUE iff the plot -C is to be annotated with the values of ELAT, -C ELON, and A -C CF = Conversion factor for degrees to radians -C CT = Cos(ELAT) -C EX,EY,EZ = Cartesian coordinates of the eye-position E -C FSIZN = Font size in points for labeling nodes with -C their indexes if NUMBR = TRUE -C FSIZT = Font size in points for the title (and -C annotation if ANNOT = TRUE) -C IPX1,IPY1 = X and y coordinates (in points) of the lower -C left corner of the bounding box or viewport -C box -C IPX2,IPY2 = X and y coordinates (in points) of the upper -C right corner of the bounding box or viewport -C box -C IR = Half the width (height) of the bounding box or -C viewport box in points -- viewport radius -C LP = LIST index (pointer) -C LPL = Pointer to the last neighbor of N0 -C N0 = Index of a node whose incident arcs are to be -C drawn -C N1 = Neighbor of N0 -C R11...R23 = Components of the first two rows of a rotation -C that maps E to the north pole (0,0,1) -C SF = Scale factor for mapping world coordinates -C (window coordinates in [-WR,WR] X [-WR,WR]) -C to viewport coordinates in [IPX1,IPX2] X -C [IPY1,IPY2] -C T = Temporary variable -C TX,TY = Translation vector for mapping world coordi- -C nates to viewport coordinates -C WR = Window radius r = Sin(A) -C WRS = WR**2 -C X0,Y0,Z0 = Coordinates of N0 in the rotated coordinate -C system or label location (X0,Y0) -C X1,Y1,Z1 = Coordinates of N1 in the rotated coordinate -C system or intersection of edge N0-N1 with -C the equator (in the rotated coordinate -C system) -C -C -C Test for invalid parameters. -C - IF (LUN .LT. 0 .OR. LUN .GT. 99 .OR. - . PLTSIZ .LT. 1.0 .OR. PLTSIZ .GT. 8.5 .OR. - . N .LT. 3) - . GO TO 11 - IF (ABS(ELAT) .GT. 90.0 .OR. ABS(ELON) .GT. 180.0 - . .OR. A .GT. 90.0) GO TO 12 -C -C Compute a conversion factor CF for degrees to radians -C and compute the window radius WR. -C - CF = ATAN(1.0)/45.0 - WR = SIN(CF*A) - WRS = WR*WR -C -C Compute the lower left (IPX1,IPY1) and upper right -C (IPX2,IPY2) corner coordinates of the bounding box. -C The coordinates, specified in default user space units -C (points, at 72 points/inch with origin at the lower -C left corner of the page), are chosen to preserve the -C square aspect ratio, and to center the plot on the 8.5 -C by 11 inch page. The center of the page is (306,396), -C and IR = PLTSIZ/2 in points. -C - IR = NINT(36.0*PLTSIZ) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Output header comments. -C - WRITE (LUN,100,ERR=13) IPX1, IPY1, IPX2, IPY2 - 100 FORMAT ('%!PS-Adobe-3.0 EPSF-3.0'/ - . '%%BoundingBox:',4I4/ - . '%%Title: Triangulation'/ - . '%%Creator: STRIPACK'/ - . '%%EndComments') -C -C Set (IPX1,IPY1) and (IPX2,IPY2) to the corner coordinates -C of a viewport box obtained by shrinking the bounding box -C by 12% in each dimension. -C - IR = NINT(0.88*REAL(IR)) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Set the line thickness to 2 points, and draw the -C viewport boundary. -C - T = 2.0 - WRITE (LUN,110,ERR=13) T - WRITE (LUN,120,ERR=13) IR - WRITE (LUN,130,ERR=13) - 110 FORMAT (F12.6,' setlinewidth') - 120 FORMAT ('306 396 ',I3,' 0 360 arc') - 130 FORMAT ('stroke') -C -C Set up an affine mapping from the window box [-WR,WR] X -C [-WR,WR] to the viewport box. 
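The affine mapping just described takes window coordinates in [-WR,WR] x [-WR,WR] to the viewport box; the scale factor SF and translation (TX,TY) are set in the Fortran that follows. A short Python sketch of the same bookkeeping, assuming only the page-layout constants documented above (72 points per inch, page center (306,396), 12% viewport shrink, window radius Sin(A)); the function names are illustrative:

```python
import math

def viewport_mapping(pltsiz, a_deg):
    # Bounding box centered at (306, 396) with half-width IR = 36*PLTSIZ
    # points, shrunk by 12% to get the viewport, with window radius
    # WR = sin(A) in world coordinates.
    ir = round(36.0 * pltsiz)
    ir = round(0.88 * ir)                      # viewport radius in points
    ipx1, ipy1 = 306 - ir, 396 - ir            # lower-left viewport corner
    wr = math.sin(math.radians(a_deg))         # window radius
    sf = ir / wr                               # scale: world units -> points
    tx, ty = ipx1 + sf * wr, ipy1 + sf * wr    # translation of the window center
    return sf, tx, ty

def world_to_page(x, y, sf, tx, ty):
    # Affine map from window coordinates in [-WR, WR] x [-WR, WR] to points.
    return tx + sf * x, ty + sf * y

sf, tx, ty = viewport_mapping(pltsiz=8.0, a_deg=90.0)
print(world_to_page(0.0, 0.0, sf, tx, ty))     # window center -> (306, 396)
```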
-C - SF = REAL(IR)/WR - TX = IPX1 + SF*WR - TY = IPY1 + SF*WR - WRITE (LUN,140,ERR=13) TX, TY, SF, SF - 140 FORMAT (2F12.6,' translate'/ - . 2F12.6,' scale') -C -C The line thickness must be changed to reflect the new -C scaling which is applied to all subsequent output. -C Set it to 1.0 point. -C - T = 0.25/SF - WRITE (LUN,110,ERR=13) T -C -C Save the current graphics state, and set the clip path to -C the boundary of the window. -C - WRITE (LUN,150,ERR=13) - WRITE (LUN,160,ERR=13) WR - WRITE (LUN,170,ERR=13) - 150 FORMAT ('gsave') - 160 FORMAT ('0 0 ',F12.6,' 0 360 arc') - 170 FORMAT ('clip newpath') -C -C Compute the Cartesian coordinates of E and the components -C of a rotation R which maps E to the north pole (0,0,1). -C R is taken to be a rotation about the z-axis (into the -C yz-plane) followed by a rotation about the x-axis chosen -C so that the view-up direction is (0,0,1), or (-1,0,0) if -C E is the north or south pole. -C -C ( R11 R12 0 ) -C R = ( R21 R22 R23 ) -C ( EX EY EZ ) -C - T = CF*ELON - CT = COS(CF*ELAT) - EX = CT*COS(T) - EY = CT*SIN(T) - EZ = SIN(CF*ELAT) - IF (CT .NE. 0.0) THEN - R11 = -EY/CT - R12 = EX/CT - ELSE - R11 = 0.0 - R12 = 1.0 - ENDIF - R21 = -EZ*R12 - R22 = EZ*R11 - R23 = CT -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 3 N0 = 1,N - Z0 = EX*X(N0) + EY*Y(N0) + EZ*Z(N0) - IF (Z0 .LT. 0.) GO TO 3 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 3 - LPL = LEND(N0) - LP = LPL -C -C Loop on neighbors N1 of N0. LPL points to the last -C neighbor of N0. Copy the components of N1 into P. -C - 1 LP = LPTR(LP) - N1 = ABS(LIST(LP)) - X1 = R11*X(N1) + R12*Y(N1) - Y1 = R21*X(N1) + R22*Y(N1) + R23*Z(N1) - Z1 = EX*X(N1) + EY*Y(N1) + EZ*Z(N1) - IF (Z1 .LT. 0.) THEN -C -C N1 is a 'southern hemisphere' point. Move it to the -C intersection of edge N0-N1 with the equator so that -C the edge is clipped properly. Z1 is implicitly set -C to 0. -C - X1 = Z0*X1 - Z1*X0 - Y1 = Z0*Y1 - Z1*Y0 - T = SQRT(X1*X1+Y1*Y1) - X1 = X1/T - Y1 = Y1/T - ENDIF -C -C If node N1 is in the window and N1 < N0, bypass edge -C N0->N1 (since edge N1->N0 has already been drawn). -C - IF ( Z1 .GE. 0.0 .AND. X1*X1 + Y1*Y1 .LE. WRS - . .AND. N1 .LT. N0 ) GO TO 2 -C -C Add the edge to the path. -C - WRITE (LUN,180,ERR=13) X0, Y0, X1, Y1 - 180 FORMAT (2F12.6,' moveto',2F12.6,' lineto') -C -C Bottom of loops. -C - 2 IF (LP .NE. LPL) GO TO 1 - 3 CONTINUE -C -C Paint the path and restore the saved graphics state (with -C no clip path). -C - WRITE (LUN,130,ERR=13) - WRITE (LUN,190,ERR=13) - 190 FORMAT ('grestore') - IF (NUMBR) THEN -C -C Nodes in the window are to be labeled with their indexes. -C Convert FSIZN from points to world coordinates, and -C output the commands to select a font and scale it. -C - T = FSIZN/SF - WRITE (LUN,200,ERR=13) T - 200 FORMAT ('/Helvetica findfont'/ - . F12.6,' scalefont setfont') -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 4 N0 = 1,N - IF (EX*X(N0) + EY*Y(N0) + EZ*Z(N0) .LT. 0.) - . GO TO 4 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 4 -C -C Move to (X0,Y0) and draw the label N0. The first char- -C acter will will have its lower left corner about one -C character width to the right of the nodal position. 
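The loop on neighbors N1 of N0 above is the standard traversal of the LIST/LPTR/LEND structure: start from LEND(N0), follow LPTR until it returns to the starting pointer, and apply ABS because the last neighbor of a boundary node is stored with a negative sign. A Python transcription of that traversal, keeping the Fortran's 1-based indices (the function name is illustrative, not part of STRIPACK):

```python
def neighbors_ccw(n0, lst, lptr, lend):
    # Counterclockwise neighbors of node n0 in the LIST/LPTR/LEND structure.
    # Indices are 1-based as in the Fortran, so element i is array[i - 1].
    lpl = lend[n0 - 1]
    lp = lpl
    out = []
    while True:
        lp = lptr[lp - 1]
        out.append(abs(lst[lp - 1]))   # last neighbor of a boundary node is negative
        if lp == lpl:
            break
    return out

# The three-node structure written by TRMESH above for first triangle (1,2,3):
lst  = [2, -3, 3, -1, 1, -2]
lptr = [2, 1, 4, 3, 6, 5]
lend = [2, 4, 6]
print(neighbors_ccw(1, lst, lptr, lend))   # [2, 3]
```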
-C - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,220,ERR=13) N0 - 210 FORMAT (2F12.6,' moveto') - 220 FORMAT ('(',I3,') show') - 4 CONTINUE - ENDIF -C -C Convert FSIZT from points to world coordinates, and output -C the commands to select a font and scale it. -C - T = FSIZT/SF - WRITE (LUN,200,ERR=13) T -C -C Display TITLE centered above the plot: -C - Y0 = WR + 3.0*T - WRITE (LUN,230,ERR=13) TITLE, Y0 - 230 FORMAT (A80/' stringwidth pop 2 div neg ',F12.6, - . ' moveto') - WRITE (LUN,240,ERR=13) TITLE - 240 FORMAT (A80/' show') - IF (ANNOT) THEN -C -C Display the window center and radius below the plot. -C - X0 = -WR - Y0 = -WR - 50.0/SF - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,250,ERR=13) ELAT, ELON - Y0 = Y0 - 2.0*T - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,260,ERR=13) A - 250 FORMAT ('(Window center: ELAT = ',F7.2, - . ', ELON = ',F8.2,') show') - 260 FORMAT ('(Angular extent: A = ',F5.2,') show') - ENDIF -C -C Paint the path and output the showpage command and -C end-of-file indicator. -C - WRITE (LUN,270,ERR=13) - 270 FORMAT ('stroke'/ - . 'showpage'/ - . '%%EOF') -C -C HP's interpreters require a one-byte End-of-PostScript-Job -C indicator (to eliminate a timeout error message): -C ASCII 4. -C - WRITE (LUN,280,ERR=13) CHAR(4) - 280 FORMAT (A1) -C -C No error encountered. -C - IER = 0 - RETURN -C -C Invalid input parameter LUN, PLTSIZ, or N. -C - 11 IER = 1 - RETURN -C -C Invalid input parameter ELAT, ELON, or A. -C - 12 IER = 2 - RETURN -C -C Error writing to unit LUN. -C - 13 IER = 3 - RETURN - END SUBROUTINE - SUBROUTINE TRPRNT (N,X,Y,Z,IFLAG,LIST,LPTR,LEND,LOUT) - INTEGER N, IFLAG, LIST(*), LPTR(*), LEND(N), LOUT - REAL X(N), Y(N), Z(N) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/25/98 -C -C This subroutine prints the triangulation adjacency lists -C created by Subroutine TRMESH and, optionally, the nodal -C coordinates (either latitude and longitude or Cartesian -C coordinates) on logical unit LOUT. The list of neighbors -C of a boundary node is followed by index 0. The numbers of -C boundary nodes, triangles, and arcs are also printed. -C -C -C On input: -C -C N = Number of nodes in the triangulation. N .GE. 3 -C and N .LE. 9999. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes if IFLAG = 0, or -C (X and Y only) arrays of length N containing -C longitude and latitude, respectively, if -C IFLAG > 0, or unused dummy parameters if -C IFLAG < 0. -C -C IFLAG = Nodal coordinate option indicator: -C IFLAG = 0 if X, Y, and Z (assumed to contain -C Cartesian coordinates) are to be -C printed (to 6 decimal places). -C IFLAG > 0 if only X and Y (assumed to con- -C tain longitude and latitude) are -C to be printed (to 6 decimal -C places). -C IFLAG < 0 if only the adjacency lists are to -C be printed. -C -C LIST,LPTR,LEND = Data structure defining the trian- -C gulation. Refer to Subroutine -C TRMESH. -C -C LOUT = Logical unit for output. If LOUT is not in -C the range 0 to 99, output is written to -C logical unit 6. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C The adjacency lists and nodal coordinates (as specified -C by IFLAG) are written to unit LOUT. -C -C Modules required by TRPRNT: None -C -C*********************************************************** -C - INTEGER I, INC, K, LP, LPL, LUN, NA, NABOR(400), NB, - . 
ND, NL, NLMAX, NMAX, NODE, NN, NT - DATA NMAX/9999/, NLMAX/58/ -C -C Local parameters: -C -C I = NABOR index (1 to K) -C INC = Increment for NL associated with an adjacency list -C K = Counter and number of neighbors of NODE -C LP = LIST pointer of a neighbor of NODE -C LPL = Pointer to the last neighbor of NODE -C LUN = Logical unit for output (copy of LOUT) -C NA = Number of arcs in the triangulation -C NABOR = Array containing the adjacency list associated -C with NODE, with zero appended if NODE is a -C boundary node -C NB = Number of boundary nodes encountered -C ND = Index of a neighbor of NODE (or negative index) -C NL = Number of lines that have been printed on the -C current page -C NLMAX = Maximum number of print lines per page (except -C for the last page which may have two addi- -C tional lines) -C NMAX = Upper bound on N (allows 4-digit indexes) -C NODE = Index of a node and DO-loop index (1 to N) -C NN = Local copy of N -C NT = Number of triangles in the triangulation -C - NN = N - LUN = LOUT - IF (LUN .LT. 0 .OR. LUN .GT. 99) LUN = 6 -C -C Print a heading and test the range of N. -C - WRITE (LUN,100) NN - IF (NN .LT. 3 .OR. NN .GT. NMAX) THEN -C -C N is outside its valid range. -C - WRITE (LUN,110) - RETURN - ENDIF -C -C Initialize NL (the number of lines printed on the current -C page) and NB (the number of boundary nodes encountered). -C - NL = 6 - NB = 0 - IF (IFLAG .LT. 0) THEN -C -C Print LIST only. K is the number of neighbors of NODE -C that have been stored in NABOR. -C - WRITE (LUN,101) - DO 2 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 1 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 1 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. Correct the sign of the last -C neighbor, add 0 to the end of the list, and increment -C NB. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print the list of neighbors. -C - INC = (K-1)/14 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,104) NODE, (NABOR(I), I = 1,K) - IF (K .NE. 14) WRITE (LUN,107) - 2 CONTINUE - ELSEIF (IFLAG .GT. 0) THEN -C -C Print X (longitude), Y (latitude), and LIST. -C - WRITE (LUN,102) - DO 4 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 3 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 3 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print X, Y, and NABOR. -C - INC = (K-1)/8 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,105) NODE, X(NODE), Y(NODE), - . (NABOR(I), I = 1,K) - IF (K .NE. 8) WRITE (LUN,107) - 4 CONTINUE - ELSE -C -C Print X, Y, Z, and LIST. -C - WRITE (LUN,103) - DO 6 NODE = 1,NN - LPL = LEND(NODE) - LP = LPL - K = 0 -C - 5 K = K + 1 - LP = LPTR(LP) - ND = LIST(LP) - NABOR(K) = ND - IF (LP .NE. LPL) GO TO 5 - IF (ND .LE. 0) THEN -C -C NODE is a boundary node. -C - NABOR(K) = -ND - K = K + 1 - NABOR(K) = 0 - NB = NB + 1 - ENDIF -C -C Increment NL and print X, Y, Z, and NABOR. -C - INC = (K-1)/5 + 2 - NL = NL + INC - IF (NL .GT. NLMAX) THEN - WRITE (LUN,108) - NL = INC - ENDIF - WRITE (LUN,106) NODE, X(NODE), Y(NODE), - . Z(NODE), (NABOR(I), I = 1,K) - IF (K .NE. 5) WRITE (LUN,107) - 6 CONTINUE - ENDIF -C -C Print NB, NA, and NT (boundary nodes, arcs, and -C triangles). -C - IF (NB .NE. 
0) THEN - NA = 3*NN - NB - 3 - NT = 2*NN - NB - 2 - ELSE - NA = 3*NN - 6 - NT = 2*NN - 4 - ENDIF - WRITE (LUN,109) NB, NA, NT - RETURN -C -C Print formats: -C - 100 FORMAT (///15X,'STRIPACK Triangulation Data ', - . 'Structure, N = ',I5//) - 101 FORMAT (1X,'Node',31X,'Neighbors of Node'//) - 102 FORMAT (1X,'Node',5X,'Longitude',6X,'Latitude', - . 18X,'Neighbors of Node'//) - 103 FORMAT (1X,'Node',5X,'X(Node)',8X,'Y(Node)',8X, - . 'Z(Node)',11X,'Neighbors of Node'//) - 104 FORMAT (1X,I4,4X,14I5/(1X,8X,14I5)) - 105 FORMAT (1X,I4,2E15.6,4X,8I5/(1X,38X,8I5)) - 106 FORMAT (1X,I4,3E15.6,4X,5I5/(1X,53X,5I5)) - 107 FORMAT (1X) - 108 FORMAT (///) - 109 FORMAT (/1X,'NB = ',I4,' Boundary Nodes',5X, - . 'NA = ',I5,' Arcs',5X,'NT = ',I5, - . ' Triangles') - 110 FORMAT (1X,10X,'*** N is outside its valid', - . ' range ***') - END SUBROUTINE - SUBROUTINE VRPLOT (LUN,PLTSIZ,ELAT,ELON,A,N,X,Y,Z, - . NT,LISTC,LPTR,LEND,XC,YC,ZC,TITLE, - . NUMBR, IER) - CHARACTER*(*) TITLE - INTEGER LUN, N, NT, LISTC(*), LPTR(*), LEND(N), IER - LOGICAL NUMBR - REAL PLTSIZ, ELAT, ELON, A, X(N), Y(N), Z(N), - . XC(NT), YC(NT), ZC(NT) -C -C*********************************************************** -C -C From STRIPACK -C Robert J. Renka -C Dept. of Computer Science -C Univ. of North Texas -C renka@cs.unt.edu -C 07/16/98 -C -C This subroutine creates a level-2 Encapsulated Post- -C script (EPS) file containing a graphical depiction of a -C Voronoi diagram of a set of nodes on the unit sphere. -C The visible vertices are projected onto the plane that -C contains the origin and has normal defined by a user- -C specified eye-position. Projections of adjacent (visible) -C Voronoi vertices are connected by line segments. -C -C The parameters defining the Voronoi diagram may be com- -C puted by Subroutine CRLIST. -C -C -C On input: -C -C LUN = Logical unit number in the range 0 to 99. -C The unit should be opened with an appropriate -C file name before the call to this routine. -C -C PLTSIZ = Plot size in inches. A circular window in -C the projection plane is mapped to a circu- -C lar viewport with diameter equal to .88* -C PLTSIZ (leaving room for labels outside the -C viewport). The viewport is centered on the -C 8.5 by 11 inch page, and its boundary is -C drawn. 1.0 .LE. PLTSIZ .LE. 8.5. -C -C ELAT,ELON = Latitude and longitude (in degrees) of -C the center of projection E (the center -C of the plot). The projection plane is -C the plane that contains the origin and -C has E as unit normal. In a rotated -C coordinate system for which E is the -C north pole, the projection plane con- -C tains the equator, and only northern -C hemisphere points are visible (from the -C point at infinity in the direction E). -C These are projected orthogonally onto -C the projection plane (by zeroing the z- -C component in the rotated coordinate -C system). ELAT and ELON must be in the -C range -90 to 90 and -180 to 180, respec- -C tively. -C -C A = Angular distance in degrees from E to the boun- -C dary of a circular window against which the -C Voronoi diagram is clipped. The projected win- -C dow is a disk of radius r = Sin(A) centered at -C the origin, and only visible vertices whose -C projections are within distance r of the origin -C are included in the plot. Thus, if A = 90, the -C plot includes the entire hemisphere centered at -C E. 0 .LT. A .LE. 90. -C -C N = Number of nodes (Voronoi centers) and Voronoi -C regions. N .GE. 3. -C -C X,Y,Z = Arrays of length N containing the Cartesian -C coordinates of the nodes (unit vectors). 
-C -C NT = Number of Voronoi region vertices (triangles, -C including those in the extended triangulation -C if the number of boundary nodes NB is nonzero): -C NT = 2*N-4. -C -C LISTC = Array of length 3*NT containing triangle -C indexes (indexes to XC, YC, and ZC) stored -C in 1-1 correspondence with LIST/LPTR entries -C (or entries that would be stored in LIST for -C the extended triangulation): the index of -C triangle (N1,N2,N3) is stored in LISTC(K), -C LISTC(L), and LISTC(M), where LIST(K), -C LIST(L), and LIST(M) are the indexes of N2 -C as a neighbor of N1, N3 as a neighbor of N2, -C and N1 as a neighbor of N3. The Voronoi -C region associated with a node is defined by -C the CCW-ordered sequence of circumcenters in -C one-to-one correspondence with its adjacency -C list (in the extended triangulation). -C -C LPTR = Array of length 3*NT = 6*N-12 containing a -C set of pointers (LISTC indexes) in one-to-one -C correspondence with the elements of LISTC. -C LISTC(LPTR(I)) indexes the triangle which -C follows LISTC(I) in cyclical counterclockwise -C order (the first neighbor follows the last -C neighbor). -C -C LEND = Array of length N containing a set of -C pointers to triangle lists. LP = LEND(K) -C points to a triangle (indexed by LISTC(LP)) -C containing node K for K = 1 to N. -C -C XC,YC,ZC = Arrays of length NT containing the -C Cartesian coordinates of the triangle -C circumcenters (Voronoi vertices). -C XC(I)**2 + YC(I)**2 + ZC(I)**2 = 1. -C -C TITLE = Type CHARACTER variable or constant contain- -C ing a string to be centered above the plot. -C The string must be enclosed in parentheses; -C i.e., the first and last characters must be -C '(' and ')', respectively, but these are not -C displayed. TITLE may have at most 80 char- -C acters including the parentheses. -C -C NUMBR = Option indicator: If NUMBR = TRUE, the -C nodal indexes are plotted at the Voronoi -C region centers. -C -C Input parameters are not altered by this routine. -C -C On output: -C -C IER = Error indicator: -C IER = 0 if no errors were encountered. -C IER = 1 if LUN, PLTSIZ, N, or NT is outside -C its valid range. -C IER = 2 if ELAT, ELON, or A is outside its -C valid range. -C IER = 3 if an error was encountered in writing -C to unit LUN. -C -C Modules required by VRPLOT: None -C -C Intrinsic functions called by VRPLOT: ABS, ATAN, COS, -C NINT, REAL, SIN, -C SQRT -C -C*********************************************************** -C - INTEGER IPX1, IPX2, IPY1, IPY2, IR, KV1, KV2, LP, LPL, - . N0 - LOGICAL ANNOT, IN1, IN2 - REAL CF, CT, EX, EY, EZ, FSIZN, FSIZT, R11, R12, - . R21, R22, R23, SF, T, TX, TY, WR, WRS, X0, X1, - . 
X2, Y0, Y1, Y2, Z1, Z2 -C - DATA ANNOT/.TRUE./, FSIZN/10.0/, FSIZT/16.0/ -C -C Local parameters: -C -C ANNOT = Logical variable with value TRUE iff the plot -C is to be annotated with the values of ELAT, -C ELON, and A -C CF = Conversion factor for degrees to radians -C CT = Cos(ELAT) -C EX,EY,EZ = Cartesian coordinates of the eye-position E -C FSIZN = Font size in points for labeling nodes with -C their indexes if NUMBR = TRUE -C FSIZT = Font size in points for the title (and -C annotation if ANNOT = TRUE) -C IN1,IN2 = Logical variables with value TRUE iff the -C projections of vertices KV1 and KV2, respec- -C tively, are inside the window -C IPX1,IPY1 = X and y coordinates (in points) of the lower -C left corner of the bounding box or viewport -C box -C IPX2,IPY2 = X and y coordinates (in points) of the upper -C right corner of the bounding box or viewport -C box -C IR = Half the width (height) of the bounding box or -C viewport box in points -- viewport radius -C KV1,KV2 = Endpoint indexes of a Voronoi edge -C LP = LIST index (pointer) -C LPL = Pointer to the last neighbor of N0 -C N0 = Index of a node -C R11...R23 = Components of the first two rows of a rotation -C that maps E to the north pole (0,0,1) -C SF = Scale factor for mapping world coordinates -C (window coordinates in [-WR,WR] X [-WR,WR]) -C to viewport coordinates in [IPX1,IPX2] X -C [IPY1,IPY2] -C T = Temporary variable -C TX,TY = Translation vector for mapping world coordi- -C nates to viewport coordinates -C WR = Window radius r = Sin(A) -C WRS = WR**2 -C X0,Y0 = Projection plane coordinates of node N0 or -C label location -C X1,Y1,Z1 = Coordinates of vertex KV1 in the rotated -C coordinate system -C X2,Y2,Z2 = Coordinates of vertex KV2 in the rotated -C coordinate system or intersection of edge -C KV1-KV2 with the equator (in the rotated -C coordinate system) -C -C -C Test for invalid parameters. -C - IF (LUN .LT. 0 .OR. LUN .GT. 99 .OR. - . PLTSIZ .LT. 1.0 .OR. PLTSIZ .GT. 8.5 .OR. - . N .LT. 3 .OR. NT .NE. 2*N-4) - . GO TO 11 - IF (ABS(ELAT) .GT. 90.0 .OR. ABS(ELON) .GT. 180.0 - . .OR. A .GT. 90.0) GO TO 12 -C -C Compute a conversion factor CF for degrees to radians -C and compute the window radius WR. -C - CF = ATAN(1.0)/45.0 - WR = SIN(CF*A) - WRS = WR*WR -C -C Compute the lower left (IPX1,IPY1) and upper right -C (IPX2,IPY2) corner coordinates of the bounding box. -C The coordinates, specified in default user space units -C (points, at 72 points/inch with origin at the lower -C left corner of the page), are chosen to preserve the -C square aspect ratio, and to center the plot on the 8.5 -C by 11 inch page. The center of the page is (306,396), -C and IR = PLTSIZ/2 in points. -C - IR = NINT(36.0*PLTSIZ) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Output header comments. -C - WRITE (LUN,100,ERR=13) IPX1, IPY1, IPX2, IPY2 - 100 FORMAT ('%!PS-Adobe-3.0 EPSF-3.0'/ - . '%%BoundingBox:',4I4/ - . '%%Title: Voronoi diagram'/ - . '%%Creator: STRIPACK'/ - . '%%EndComments') -C -C Set (IPX1,IPY1) and (IPX2,IPY2) to the corner coordinates -C of a viewport box obtained by shrinking the bounding box -C by 12% in each dimension. -C - IR = NINT(0.88*REAL(IR)) - IPX1 = 306 - IR - IPX2 = 306 + IR - IPY1 = 396 - IR - IPY2 = 396 + IR -C -C Set the line thickness to 2 points, and draw the -C viewport boundary. 
-C - T = 2.0 - WRITE (LUN,110,ERR=13) T - WRITE (LUN,120,ERR=13) IR - WRITE (LUN,130,ERR=13) - 110 FORMAT (F12.6,' setlinewidth') - 120 FORMAT ('306 396 ',I3,' 0 360 arc') - 130 FORMAT ('stroke') -C -C Set up an affine mapping from the window box [-WR,WR] X -C [-WR,WR] to the viewport box. -C - SF = REAL(IR)/WR - TX = IPX1 + SF*WR - TY = IPY1 + SF*WR - WRITE (LUN,140,ERR=13) TX, TY, SF, SF - 140 FORMAT (2F12.6,' translate'/ - . 2F12.6,' scale') -C -C The line thickness must be changed to reflect the new -C scaling which is applied to all subsequent output. -C Set it to 1.0 point. -C - T = 1.0/SF - WRITE (LUN,110,ERR=13) T -C -C Save the current graphics state, and set the clip path to -C the boundary of the window. -C - WRITE (LUN,150,ERR=13) - WRITE (LUN,160,ERR=13) WR - WRITE (LUN,170,ERR=13) - 150 FORMAT ('gsave') - 160 FORMAT ('0 0 ',F12.6,' 0 360 arc') - 170 FORMAT ('clip newpath') -C -C Compute the Cartesian coordinates of E and the components -C of a rotation R which maps E to the north pole (0,0,1). -C R is taken to be a rotation about the z-axis (into the -C yz-plane) followed by a rotation about the x-axis chosen -C so that the view-up direction is (0,0,1), or (-1,0,0) if -C E is the north or south pole. -C -C ( R11 R12 0 ) -C R = ( R21 R22 R23 ) -C ( EX EY EZ ) -C - T = CF*ELON - CT = COS(CF*ELAT) - EX = CT*COS(T) - EY = CT*SIN(T) - EZ = SIN(CF*ELAT) - IF (CT .NE. 0.0) THEN - R11 = -EY/CT - R12 = EX/CT - ELSE - R11 = 0.0 - R12 = 1.0 - ENDIF - R21 = -EZ*R12 - R22 = EZ*R11 - R23 = CT -C -C Loop on nodes (Voronoi centers) N0. -C LPL indexes the last neighbor of N0. -C - DO 3 N0 = 1,N - LPL = LEND(N0) -C -C Set KV2 to the first (and last) vertex index and compute -C its coordinates (X2,Y2,Z2) in the rotated coordinate -C system. -C - KV2 = LISTC(LPL) - X2 = R11*XC(KV2) + R12*YC(KV2) - Y2 = R21*XC(KV2) + R22*YC(KV2) + R23*ZC(KV2) - Z2 = EX*XC(KV2) + EY*YC(KV2) + EZ*ZC(KV2) -C -C IN2 = TRUE iff KV2 is in the window. -C - IN2 = Z2 .GE. 0. .AND. X2*X2 + Y2*Y2 .LE. WRS -C -C Loop on neighbors N1 of N0. For each triangulation edge -C N0-N1, KV1-KV2 is the corresponding Voronoi edge. -C - LP = LPL - 1 LP = LPTR(LP) - KV1 = KV2 - X1 = X2 - Y1 = Y2 - Z1 = Z2 - IN1 = IN2 - KV2 = LISTC(LP) -C -C Compute the new values of (X2,Y2,Z2) and IN2. -C - X2 = R11*XC(KV2) + R12*YC(KV2) - Y2 = R21*XC(KV2) + R22*YC(KV2) + R23*ZC(KV2) - Z2 = EX*XC(KV2) + EY*YC(KV2) + EZ*ZC(KV2) - IN2 = Z2 .GE. 0. .AND. X2*X2 + Y2*Y2 .LE. WRS -C -C Add edge KV1-KV2 to the path iff both endpoints are inside -C the window and KV2 > KV1, or KV1 is inside and KV2 is -C outside (so that the edge is drawn only once). -C - IF (.NOT. IN1 .OR. (IN2 .AND. KV2 .LE. KV1)) - . GO TO 2 - IF (Z2 .LT. 0.) THEN -C -C KV2 is a 'southern hemisphere' point. Move it to the -C intersection of edge KV1-KV2 with the equator so that -C the edge is clipped properly. Z2 is implicitly set -C to 0. -C - X2 = Z1*X2 - Z2*X1 - Y2 = Z1*Y2 - Z2*Y1 - T = SQRT(X2*X2+Y2*Y2) - X2 = X2/T - Y2 = Y2/T - ENDIF - WRITE (LUN,180,ERR=13) X1, Y1, X2, Y2 - 180 FORMAT (2F12.6,' moveto',2F12.6,' lineto') -C -C Bottom of loops. -C - 2 IF (LP .NE. LPL) GO TO 1 - 3 CONTINUE -C -C Paint the path and restore the saved graphics state (with -C no clip path). -C - WRITE (LUN,130,ERR=13) - WRITE (LUN,190,ERR=13) - 190 FORMAT ('grestore') - IF (NUMBR) THEN -C -C Nodes in the window are to be labeled with their indexes. -C Convert FSIZN from points to world coordinates, and -C output the commands to select a font and scale it. 
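Both node loops above keep only points that face the eye position (nonnegative z after rotation by R) and whose projections fall within the window radius Sin(A). A Python/NumPy sketch of that rotation and visibility test, built from the R11..R23 formulas documented above (function names are illustrative):

```python
import numpy as np

def eye_rotation(elat_deg, elon_deg):
    # Rotation described above: row 3 is the eye direction E, and rows 1-2
    # complete an orthonormal frame so that E maps to (0, 0, 1).
    elat, elon = np.radians(elat_deg), np.radians(elon_deg)
    ct = np.cos(elat)
    ex, ey, ez = ct * np.cos(elon), ct * np.sin(elon), np.sin(elat)
    if ct != 0.0:
        r11, r12 = -ey / ct, ex / ct
    else:
        r11, r12 = 0.0, 1.0            # E is a pole; view-up becomes (-1, 0, 0)
    return np.array([[r11,       r12,      0.0],
                     [-ez * r12, ez * r11, ct ],
                     [ex,        ey,       ez ]])

def visible_projections(nodes, elat_deg, elon_deg, a_deg):
    # Orthographic projection of the nodes that face the eye and fall inside
    # the circular window of angular radius A (projected radius sin(A)).
    rot = eye_rotation(elat_deg, elon_deg)
    p = nodes @ rot.T                  # rotated coordinates (x0, y0, z0)
    wr = np.sin(np.radians(a_deg))
    keep = (p[:, 2] >= 0.0) & (p[:, 0]**2 + p[:, 1]**2 <= wr * wr)
    return p[keep, :2], np.flatnonzero(keep)
```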
-C - T = FSIZN/SF - WRITE (LUN,200,ERR=13) T - 200 FORMAT ('/Helvetica findfont'/ - . F12.6,' scalefont setfont') -C -C Loop on visible nodes N0 that project to points (X0,Y0) in -C the window. -C - DO 4 N0 = 1,N - IF (EX*X(N0) + EY*Y(N0) + EZ*Z(N0) .LT. 0.) - . GO TO 4 - X0 = R11*X(N0) + R12*Y(N0) - Y0 = R21*X(N0) + R22*Y(N0) + R23*Z(N0) - IF (X0*X0 + Y0*Y0 .GT. WRS) GO TO 4 -C -C Move to (X0,Y0), and draw the label N0 with the origin -C of the first character at (X0,Y0). -C - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,220,ERR=13) N0 - 210 FORMAT (2F12.6,' moveto') - 220 FORMAT ('(',I3,') show') - 4 CONTINUE - ENDIF -C -C Convert FSIZT from points to world coordinates, and output -C the commands to select a font and scale it. -C - T = FSIZT/SF - WRITE (LUN,200,ERR=13) T -C -C Display TITLE centered above the plot: -C - Y0 = WR + 3.0*T - WRITE (LUN,230,ERR=13) TITLE, Y0 - 230 FORMAT (A80/' stringwidth pop 2 div neg ',F12.6, - . ' moveto') - WRITE (LUN,240,ERR=13) TITLE - 240 FORMAT (A80/' show') - IF (ANNOT) THEN -C -C Display the window center and radius below the plot. -C - X0 = -WR - Y0 = -WR - 50.0/SF - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,250,ERR=13) ELAT, ELON - Y0 = Y0 - 2.0*T - WRITE (LUN,210,ERR=13) X0, Y0 - WRITE (LUN,260,ERR=13) A - 250 FORMAT ('(Window center: ELAT = ',F7.2, - . ', ELON = ',F8.2,') show') - 260 FORMAT ('(Angular extent: A = ',F5.2,') show') - ENDIF -C -C Paint the path and output the showpage command and -C end-of-file indicator. -C - WRITE (LUN,270,ERR=13) - 270 FORMAT ('stroke'/ - . 'showpage'/ - . '%%EOF') -C -C HP's interpreters require a one-byte End-of-PostScript-Job -C indicator (to eliminate a timeout error message): -C ASCII 4. -C - WRITE (LUN,280,ERR=13) CHAR(4) - 280 FORMAT (A1) -C -C No error encountered. -C - IER = 0 - RETURN -C -C Invalid input parameter LUN, PLTSIZ, N, or NT. -C - 11 IER = 1 - RETURN -C -C Invalid input parameter ELAT, ELON, or A. -C - 12 IER = 2 - RETURN -C -C Error writing to unit LUN. -C - 13 IER = 3 - RETURN - END SUBROUTINE - - END MODULE STRIPACK diff --git a/grid_gen/global_scvt/src/grid_gen.F b/grid_gen/global_scvt/src/grid_gen.F deleted file mode 100644 index 34d64bb50..000000000 --- a/grid_gen/global_scvt/src/grid_gen.F +++ /dev/null @@ -1,157 +0,0 @@ -program grid_gen - - use grid_params - use grid_constants - use data_types - use grid_gen_utils - use voronoi_utils - use stripack - use scvt - use grid_meta - - implicit none - - real :: dlat, dlon - real :: dl - real :: d1, d2, d3, d4 - integer :: p1, p2, p3 - integer :: if - character (len=80) :: frame_name - - real :: pi - real :: area_per_sample, nhexs, sum_nhexs, hex_area - type (geo_point) :: p - - integer :: i, j, k, nb, ier - - real, allocatable, dimension(:) :: rlat, rlon, vclat, vclon, x, y, z, xc, yc, zc - integer, allocatable, dimension(:) :: list, lptr, listc, lend - integer, allocatable, dimension(:,:) :: ltri - - integer :: n, nrow, ntmax, nvc - integer :: tr1, tr2, tr3, tr4 - integer :: i1, i2, i3, k1, k2 - - type (adjacency_list) :: alist, clist - - real :: lat1, lon1, lat2, lon2, lat3, lon3, latc, lonc - - - call read_namelist() - - - pi = 4.0*atan(1.0) - - area_per_sample = 4.0 * pi * 6370000**2.0 / 6000000.0 - sum_nhexs = 0.0 - write(0,'(a,f10.1)') 'Computing an estimate for the required number of cells to reach dx=', min_dx - do if = 1,5 - nhexs = 0.0 - do i=1,6000000 - call random_point(p) - d1 = density_for_point(p) - dl = min_dx / (d1 ** 0.25) - hex_area = sqrt(3.0) / 2.0 * dl**2.0 - nhexs = nhexs + area_per_sample / hex_area - end do -! 
write(0,'(a,i2,a,i)') 'Estimate ',if,' for required # hexs:', nint(nhexs) - sum_nhexs = sum_nhexs + nhexs - write(0,'(a,i3,a)',advance='no') ' ...',if*20,'%' - end do - write(0,*) ' ' - write(0,*) 'Estimated # hexs:', nint(sum_nhexs/5.0) - write(0,*) ' ' - - - n = np - - ntmax = 6*n - nvc = ntmax - nrow = 6 - allocate(rlat(n)) - allocate(rlon(n)) - allocate(x(n)) - allocate(y(n)) - allocate(z(n)) - allocate(ltri(nrow,ntmax)) - allocate(list(nvc)) - allocate(lptr(nvc)) - allocate(lend(n)) - allocate(listc(nvc)) - allocate(vclat(nvc)) - allocate(vclon(nvc)) - allocate(xc(nvc)) - allocate(yc(nvc)) - allocate(zc(nvc)) - - - write(0,*) ' ' - write(0,*) 'Reading generating points from a file' - open(22,file='locs.dat',form='formatted',status='old') - if (locs_as_xyz) then - read(22,*) - do i=1,n - read(22,'(10x,f22.10,f23.10,f23.10)') x(i), y(i), z(i) - end do - call trans_inv(x, y, z, rlat, rlon, n) - else - do i=1,n - read(22,'(f13.10,1x,f13.10)') rlat(i), rlon(i) - end do - end if - close(22) - - - ! - ! Compute Voronoi corners - ! - write(0,*) ' ' - write(0,*) 'Computing Voronoi corners' - call compute_vc(rlat, rlon, n, nrow, ntmax, list, lptr, lend, listc, vclat, vclon, nvc) - - - ! - ! Form SCVT - ! - call TRANS (n, rlat, rlon, x, y, z) - call TRANS (nvc, vclat, vclon, xc, yc, zc) - write(frame_name,'(a)') 'scvt_initial.ps' - open(32,file=trim(frame_name),form='formatted',status='unknown') - call vrplot(32, 8.0, 0.0, 0.0, 90.0 ,N, X,Y,Z, 2*n-4,LISTC,LPTR,LEND,XC,YC,ZC,'(spherical centroidal voronoi tessellation)',.false.,IER) - close(32) - - call scvt_solve(n, lend, rlat, rlon, nvc, list, lptr, if) - - call compute_vc(rlat, rlon, n, nrow, ntmax, list, lptr, lend, listc, vclat, vclon, nvc) - call TRANS (n, rlat, rlon, x, y, z) - call TRANS (nvc, vclat, vclon, xc, yc, zc) - write(frame_name,'(a)') 'scvt_final.ps' - open(32,file=trim(frame_name),form='formatted',status='unknown') - call vrplot(32, 8.0, 0.0, 0.0, 90.0 ,N, X,Y,Z, 2*n-4,LISTC,LPTR,LEND,XC,YC,ZC,'(spherical centroidal voronoi tessellation)',.false.,IER) - close(32) - - - write(0,*) ' ' - write(0,*) 'Deriving grid metadata and writing output' - call convert_adjacency_list(n, lend, nvc, list, lptr, alist) - call convert_corner_list(n, lend, nvc, listc, lptr, clist) - call write_grid(rlat, rlon, n, vclat, vclon, nvc, alist, clist) - - - deallocate(rlat) - deallocate(x) - deallocate(y) - deallocate(z) - deallocate(rlon) - deallocate(ltri) - deallocate(list) - deallocate(lptr) - deallocate(lend) - deallocate(listc) - deallocate(vclat) - deallocate(vclon) - deallocate(xc) - deallocate(yc) - deallocate(zc) - -end program grid_gen diff --git a/grid_gen/global_scvt/src/module_data_types.F b/grid_gen/global_scvt/src/module_data_types.F deleted file mode 100644 index 011bd8fbf..000000000 --- a/grid_gen/global_scvt/src/module_data_types.F +++ /dev/null @@ -1,206 +0,0 @@ -module data_types - - integer, parameter :: LESS = -1, EQUAL = 0, GREATER = 1 - - type geo_point - real :: lat, lon - end type geo_point - - type send_list_ptr - integer :: nodeID - integer :: nNodeList - integer, pointer, dimension(:) :: nodeList - type (send_list_ptr), pointer :: next - end type send_list_ptr - - type recv_list_ptr - integer :: nodeID - integer :: nNodeList - integer, pointer, dimension(:) :: nodeList - type (recv_list_ptr), pointer :: next - end type recv_list_ptr - - type adjacency_list - integer :: nNodes - integer :: nNeighbors - integer, pointer, dimension(:) :: neighbor, start, len - end type adjacency_list - - type binary_tree - integer :: node1, 
node2 - integer :: vertex1, vertex2 - real :: lat1, lon1, lat2, lon2 - type (binary_tree), pointer :: left, right, parent - end type binary_tree - - contains - - - integer function cmp_points(a, b) - - implicit none - - type (geo_point), intent(in) :: a, b - - if (a%lat > b%lat) then - cmp_points = GREATER - else if (a%lat == b%lat) then - if (a%lon > b%lon) then - cmp_points = GREATER - else if (a%lon == b%lon) then - cmp_points = EQUAL - else - cmp_points = LESS - end if - else - cmp_points = LESS - end if - - end function cmp_points - - - subroutine swap_points(a, b) - - implicit none - - type (geo_point), intent(inout) :: a, b - - type (geo_point) :: temp - - temp = a - a = b - b = temp - - end subroutine swap_points - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE CONVERT_ADJACENCY_LIST - ! - ! Convert adjacency list from format provided by STRIPACK to format used in - ! our code. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine convert_adjacency_list(n, lend, nvc, list, lptr, alist) - - implicit none - - integer, intent(in) :: n, nvc - integer, dimension(n), intent(in) :: lend - integer, dimension(nvc), intent(in) :: lptr - integer, dimension(nvc), intent(in) :: list - type (adjacency_list), intent(inout) :: alist - - integer :: i, j, k, len, ipos - - len = 0 - - ! Count total number of nodes - do i=1,n - - ! Scan neighbors of i - k = lend(i) - k = lptr(lend(i)) - len = len + 1 - - do while (k /= lend(i)) - k = lptr(k) - len = len + 1 - end do - - end do - - alist % nNodes = n - alist % nNeighbors = len - allocate(alist % neighbor(len)) - allocate(alist % start(n)) - allocate(alist % len(n)) - - ipos = 0 - do i=1,n - - ! Scan neighbors of i - k = lend(i) - k = lptr(lend(i)) - ipos = ipos + 1 - - alist % start(i) = ipos - alist % neighbor(ipos) = list(k) - alist % len(i) = 1 - - do while (k /= lend(i)) - k = lptr(k) - ipos = ipos + 1 - - alist % neighbor(ipos) = list(k) - alist % len(i) = alist % len(i) + 1 - end do - end do - - end subroutine convert_adjacency_list - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE CONVERT_CORNER_LIST - ! - ! Convert VC list from format provided by STRIPACK to format used in - ! our code. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine convert_corner_list(n, lend, nvc, listc, lptr, clist) - - implicit none - - integer, intent(in) :: n, nvc - integer, dimension(n), intent(in) :: lend - integer, dimension(nvc), intent(in) :: lptr - integer, dimension(nvc), intent(in) :: listc - type (adjacency_list), intent(inout) :: clist - - integer :: i, j, k, len, ipos - - len = 0 - - ! Count total number of nodes - do i=1,n - - ! Scan neighbors of i - k = lend(i) - k = lptr(lend(i)) - len = len + 1 - - do while (k /= lend(i)) - k = lptr(k) - len = len + 1 - end do - - end do - - clist % nNodes = n - clist % nNeighbors = len - allocate(clist % neighbor(len)) - allocate(clist % start(n)) - allocate(clist % len(n)) - - ipos = 0 - do i=1,n - - ! 
Scan neighbors of i - k = lend(i) - k = lptr(lend(i)) - ipos = ipos + 1 - - clist % start(i) = ipos - clist % neighbor(ipos) = listc(k) - clist % len(i) = 1 - - do while (k /= lend(i)) - k = lptr(k) - ipos = ipos + 1 - - clist % neighbor(ipos) = listc(k) - clist % len(i) = clist % len(i) + 1 - end do - end do - - end subroutine convert_corner_list - -end module data_types diff --git a/grid_gen/global_scvt/src/module_grid_constants.F b/grid_gen/global_scvt/src/module_grid_constants.F deleted file mode 100644 index b23a3d3ef..000000000 --- a/grid_gen/global_scvt/src/module_grid_constants.F +++ /dev/null @@ -1,11 +0,0 @@ -module grid_constants - - real, parameter :: radius = 1. - real, parameter :: pii = 3.141592653589793 - real, parameter :: rtod = 180./pii - real, parameter :: s_area = 4.*pii*radius*radius - - integer, parameter :: nicos_vertices = 12 - integer, parameter :: nicos_neighbors = 5 - -end module grid_constants diff --git a/grid_gen/global_scvt/src/module_grid_gen_utils.F b/grid_gen/global_scvt/src/module_grid_gen_utils.F deleted file mode 100644 index 80c0fbd52..000000000 --- a/grid_gen/global_scvt/src/module_grid_gen_utils.F +++ /dev/null @@ -1,309 +0,0 @@ -module grid_gen_utils - - use sphere_utilities - - - contains - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE GENERATE_BASE_GRID - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine generate_base_grid(points, neighbors, triangles) - - use data_types - use grid_params - use grid_constants - - implicit none - - type (geo_point), dimension(nicos_vertices), intent(out) :: points - integer, dimension(nicos_neighbors,nicos_vertices), intent(out) :: neighbors - integer, dimension(3,20), intent(out) :: triangles - - real dl,dtri - integer im1, ii, ip1, i - - !-- set point positions - - dl = 2.*pii/5. - dtri = acos(cos(0.5*dl)/(1.+cos(0.5*dl))) - - points(1)%lat = pii/2. ! latitude of north pole point - points(1)%lon = 0. ! longitude of north pole point - - ! - ! Set points just south of the north pole - ! - do i=2,6 - points(i)%lat = pii/2. - dtri ! latitude, same for all 5 point - points(i)%lon = 0. + dl*float(i-2) ! longitude - enddo - - points(12)%lat = -pii/2. ! latitude of south pole point - points(12)%lon = 0. ! longitude of south pole point - - do i=7,11 ! the set points just north of the south pole - points(i)%lat = -pii/2. + dtri ! latitude, same for all 5 point - points(i)%lon = 0. + dl*(0.5+float(i-7)) ! longitude. shifted from north - enddo - - ! construct neighbors - - do i=1,5 - neighbors(i,1) = i+1 ! north pole neighbors, points 2 through 6 - neighbors(i,12) = i+6 ! south pole neighbors, points 7 through 11 - neighbors(1,i+1) = 1 ! add north pole point - neighbors(1,i+6) = 12 - enddo - - do i=2,6 - im1 = i-1 - ip1 = i+1 - if(im1 .lt. 2) im1 = 6 - if(ip1 .gt. 6) ip1 = 2 - neighbors(2,i) = im1 - neighbors(3,i) = ip1 - - im1 = i+4 - ii = i+5 - if(im1 .lt. 7) im1 = 11 - neighbors(4,i) = im1 - neighbors(5,i) = ii - - enddo - - do i=7,11 - im1 = i-1 - ip1 = i+1 - if(im1 .lt. 7) im1 = 11 - if(ip1 .gt. 11) ip1 = 7 - neighbors(2,i) = im1 - neighbors(3,i) = ip1 - - im1 = i-5 - ii = i-4 - if(ii .gt. 6) ii = 2 - neighbors(4,i) = im1 - neighbors(5,i) = ii - enddo - - ! set the triangle points - - do i=1,5 ! triangles with north pole points - triangles(1,i) = 1 - triangles(2,i) = i+1 - triangles(3,i) = i+2 - if(triangles(2,i) .gt. 6) triangles(2,i) = triangles(2,i) - 5 - if(triangles(3,i) .gt. 
6) triangles(3,i) = triangles(3,i) - 5 - enddo - - do i=16,20 ! triangles with south pole points - triangles(1,i) = 12 - triangles(2,i) = i-9 - triangles(3,i) = i-8 - if(triangles(2,i) .gt. 11) triangles(2,i) = triangles(2,i) - 5 - if(triangles(3,i) .gt. 11) triangles(3,i) = triangles(3,i) - 5 - enddo - - do i=6,10 ! upward pointing equator triangles - triangles(1,i) = i-4 - triangles(2,i) = i+5 - triangles(3,i) = i+1 - if(triangles(2,i) .gt. 11) triangles(2,i) = triangles(2,i) - 5 - if(triangles(3,i) .gt. 11) triangles(3,i) = triangles(3,i) - 5 - enddo - - do i=11,15 ! downward pointing equator triangles - triangles(1,i) = i-4 - triangles(2,i) = i-9 - triangles(3,i) = i-8 - if(triangles(2,i) .gt. 6) triangles(2,i) = triangles(2,i) - 5 - if(triangles(3,i) .gt. 6) triangles(3,i) = triangles(3,i) - 5 - enddo - - write(6,*) ' ' - write(6,*) ' triangle nodes ' - do i=1,20 - write(6,*) i, triangles(1,i),triangles(2,i),triangles(3,i) - enddo - - end subroutine generate_base_grid - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE PROCESS_TRIANGLE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine process_triangle(triangle, scalar, nmax, n, np) - - use data_types - - implicit none - - integer, intent(in) :: nmax, np - integer, intent(inout) :: n - type (geo_point), dimension(3), intent(in) :: triangle - type (geo_point), dimension(nmax), intent(inout) :: scalar - - integer :: i, j, k - type (geo_point), dimension(np) :: ab, ac, pline - - call great_circle_points(triangle(1), triangle(2), ab, np) - call great_circle_points(triangle(1), triangle(3), ac, np) - - do i=1,np - call add_point(scalar, nmax, n, ab(i)) - call add_point(scalar, nmax, n, ac(i)) - end do - - do i=3,np-1 - call great_circle_points(ab(i), ac(i), pline, i) - do j=2,i-1 - n = n + 1 - scalar(n) = pline(j) -!!! No need to check for points on the interior of the triangle -!!! call add_point(scalar, nmax, n, pline(j)) - end do - end do - - i=np - call great_circle_points(ab(i), ac(i), pline, i) - do j=2,i-1 - call add_point(scalar, nmax, n, pline(j)) - end do - - end subroutine process_triangle - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE ADD_POINT - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine add_point(array, nmax, n, point) - - use data_types - - implicit none - - integer, intent(in) :: nmax - integer, intent(inout) :: n - type (geo_point), intent(in) :: point - type (geo_point), dimension(nmax), intent(inout) :: array - - integer :: i - logical :: already_have - - already_have = .false. - - do i=1,n - if (abs(point%lat - array(i)%lat) < 0.00001 .and. & - abs(point%lon - array(i)%lon) < 0.00001) then - already_have = .true. - exit - end if - end do - - if (.not. already_have) then - n = n + 1 - array(n) = point - end if - - end subroutine add_point - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_H_AREA - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
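The generate_base_grid / process_triangle pair above builds the generating points by subdividing each of the 20 icosahedral faces along great-circle arcs and de-duplicating the points shared between faces (add_point). For a class-I geodesic subdivision in which every icosahedral edge is split into m segments, the number of distinct points is 10*m**2 + 2; relating that count to the np default in module_grid_params is an inference, not something the code states. A quick check in Python:

    # Points on an icosahedral (class-I geodesic) grid with m segments per edge.
    def icosahedral_point_count(m: int) -> int:
        return 10 * m * m + 2

    # m = 64 reproduces np = 40962, the default in module_grid_params.
    print([icosahedral_point_count(2 ** k) for k in range(7)])
    # [12, 42, 162, 642, 2562, 10242, 40962]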
- subroutine compute_h_area(corners, centers, areas, n) - - use data_types - - implicit none - - integer, intent(in) :: n - type (geo_point), dimension(6,2*n-1,n), intent(inout) :: corners - type (geo_point), dimension(2*n-1,n), intent(inout) :: centers - real, dimension(2*n-1,n), intent(inout) :: areas - - integer :: i, j, it, itp1 - real :: hex_area - - do j=1,n - do i=1,2*n-1 - hex_area = 0. - do it=1,6 ! 6 triangles in the hexagon - itp1 = it+1 - if(itp1 > 6) itp1 = 1 - hex_area = hex_area + triangle_area( centers(i,j), & - corners( it,i,j), & - corners(itp1,i,j), 1.) - enddo - areas(i,j) = hex_area - enddo - enddo - - end subroutine compute_h_area - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_EDGE_LENGTHS - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine compute_edge_lengths(corners, edge_lengths, n) - - use data_types - - implicit none - - integer, intent(in) :: n - type (geo_point), dimension(6,2*n-1,n), intent(inout) :: corners - real, dimension(6,2*n-1,n), intent(inout) :: edge_lengths - - integer :: i, j, it, itp1 - - do j=1,n - do i=1,2*n-1 - do it=1,6 - itp1 = it+1 - if(itp1 > 6) itp1 = 1 - edge_lengths(it,i,j) = sphere_distance( corners( it,i,j), & - corners(itp1,i,j), 1.) - end do - end do - end do - - end subroutine compute_edge_lengths - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_DX - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine compute_dx( centers, dx, n ) - - use data_types - - implicit none - - integer, intent(in) :: n - type (geo_point), dimension(0:2*n,0:n+1), intent(inout) :: centers - real, dimension(3,2*n-1,n), intent(inout) :: dx - - integer :: i, j - - do j=1,n - do i=1,2*n-1 - dx(1,i,j) = sphere_distance( centers(i ,j ), & - centers(i-1,j ), 1. ) - dx(2,i,j) = sphere_distance( centers(i ,j ), & - centers(i-1,j-1), 1. ) - dx(3,i,j) = sphere_distance( centers(i ,j ), & - centers(i ,j-1), 1. ) - enddo - enddo - - end subroutine compute_dx - -end module grid_gen_utils diff --git a/grid_gen/global_scvt/src/module_grid_meta.F b/grid_gen/global_scvt/src/module_grid_meta.F deleted file mode 100644 index 4d513830a..000000000 --- a/grid_gen/global_scvt/src/module_grid_meta.F +++ /dev/null @@ -1,1470 +0,0 @@ -module grid_meta - - contains - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE WRITE_GRID - ! - ! Create metadata for mesh and write out the complete grid information. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
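Both compute_h_area above and the areaCell computation inside write_grid below measure a cell by fanning it into the spherical triangles formed by its center and consecutive corners; triangle_area itself (defined in module_sphere_utilities further below) applies L'Huilier's theorem to the three side lengths. A self-contained Python sketch of the same decomposition, assuming a unit sphere and points given as (lat, lon) in radians:

    import math

    def arc(p1, p2):
        # great-circle angle between two (lat, lon) points (haversine form)
        return 2.0 * math.asin(math.sqrt(
            math.sin(0.5 * (p2[0] - p1[0])) ** 2 +
            math.cos(p1[0]) * math.cos(p2[0]) * math.sin(0.5 * (p2[1] - p1[1])) ** 2))

    def spherical_triangle_area(p1, p2, p3):
        # L'Huilier's theorem: spherical excess from the three side lengths
        a, b, c = arc(p1, p2), arc(p2, p3), arc(p3, p1)
        s = 0.5 * (a + b + c)
        t = math.sqrt(math.tan(0.5 * s) * math.tan(0.5 * (s - a)) *
                      math.tan(0.5 * (s - b)) * math.tan(0.5 * (s - c)))
        return 4.0 * math.atan(t)

    def cell_area(center, corners):
        # fan the cell into triangles (center, corner i, corner i+1)
        return sum(spherical_triangle_area(center, corners[i], corners[(i + 1) % len(corners)])
                   for i in range(len(corners)))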
- subroutine write_grid(rlat, rlon, n, vclat, vclon, nvc, alist, clist) - - use data_types - use grid_constants - use sphere_utilities - use write_netcdf - use scvt - - implicit none - - integer, intent(in) :: n, nvc - real, dimension(n), intent(inout) :: rlat, rlon - real, dimension(nvc), intent(inout) :: vclat, vclon - type (adjacency_list) :: alist, clist - - integer, parameter :: maxEdges = 10 - - type tree_pointer_list - integer :: stage - type (binary_tree), pointer :: p - end type tree_pointer_list - - integer :: i, ii, j, js, k, prev_edge, next_edge, vtx, nObtuse, jj, iEdge, iFlag - integer :: nCells, nEdges, nVertices, nVertLevels, nTracers - integer, dimension(3) :: cellsOnTriangle - integer, allocatable, dimension(:) :: permutation, indexToCellID, indexToEdgeID, indexToVertexID - integer, allocatable, dimension(:) :: nEdgesOnCell, nEdgesOnEdge, nEdgesOnVertex, nCellsOnVertex - integer, allocatable, dimension(:) :: tempEdgesOnEdge - integer, allocatable, dimension(:,:) :: cellsOnEdge, edgesOnCell, verticesOnCell, cellsOnCell, & - verticesOnEdge, edgesOnEdge, edgesOnVertex, cellsOnVertex - integer, allocatable, dimension(:) :: isObtuse - real, allocatable, dimension(:,:) :: weightsOnEdge, kiteAreasOnVertex - integer :: temp - logical :: tdrtest = .true. - real :: sum_r, area, r, s, de, rtmp - real, allocatable, dimension(:) :: latCell, lonCell, latEdge, lonEdge, angleEdge, latVertex, lonVertex, & - lat1Edge, lon1Edge, lat2Edge, lon2Edge, dvEdge, dv1Edge, dv2Edge, dcEdge, & - areaCell, areaTriangle, fEdge, fVertex, h_s, u_sbr - real, allocatable, dimension(:,:,:) :: u, v, h, vh, circulation, vorticity, ke - real, allocatable, dimension(:,:,:,:) :: tracers - real, allocatable, dimension(:) :: xCell, yCell, zCell, xEdge, yEdge, zEdge, xVertex, yVertex, zVertex, meshDensity - type (geo_point) :: vertex1GP, vertex2GP, cell1GP, cell2GP, cell3GP, edgeGP, edgeGP_prev, edgeGP_next, pCell - type (geo_point) :: center - type (geo_point), allocatable, dimension(:) :: points - type (binary_tree), pointer :: treeRoot - type (tree_pointer_list), allocatable, dimension(:) :: cursor - - - ! - ! Compute number of cells - ! - nCells = alist % nNodes - - - ! - ! Compute number of edges - ! - nEdges = alist % nNeighbors / 2 - - - ! - ! Compute number of vertices - ! - nVertices = nEdges - nCells + 2 ! 
Euler's formula - - - allocate(latCell(nCells)) - allocate(lonCell(nCells)) - allocate(meshDensity(nCells)) - allocate(xCell(nCells)) - allocate(yCell(nCells)) - allocate(zCell(nCells)) - allocate(latEdge(nEdges)) - allocate(lonEdge(nEdges)) - allocate(xEdge(nEdges)) - allocate(yEdge(nEdges)) - allocate(zEdge(nEdges)) - allocate(dvEdge(nEdges)) - allocate(dv1Edge(nEdges)) - allocate(dv2Edge(nEdges)) - allocate(dcEdge(nEdges)) - allocate(areaCell(nCells)) - allocate(areaTriangle(nVertices)) - allocate(angleEdge(nEdges)) - allocate(u_sbr(nEdges)) - allocate(latVertex(nVertices)) - allocate(lonVertex(nVertices)) - allocate(xVertex(nVertices)) - allocate(yVertex(nVertices)) - allocate(zVertex(nVertices)) - allocate(lat1Edge(nEdges)) - allocate(lon1Edge(nEdges)) - allocate(lat2Edge(nEdges)) - allocate(lon2Edge(nEdges)) - allocate(cellsOnEdge(2,nEdges)) - allocate(isObtuse(nVertices)) - allocate(edgesOnCell(maxEdges,nCells)) - allocate(verticesOnCell(maxEdges,nCells)) - allocate(verticesOnEdge(2,nEdges)) - allocate(edgesOnEdge(2*maxEdges,nEdges)) - allocate(edgesOnVertex(3,nVertices)) - allocate(cellsOnVertex(3,nVertices)) - allocate(tempEdgesOnEdge(2*maxEdges)) - allocate(weightsOnEdge(2*maxEdges,nEdges)) - allocate(kiteAreasOnVertex(3,nVertices)) - allocate(cellsOnCell(maxEdges,nCells)) - allocate(nEdgesOnCell(nCells)) - allocate(nEdgesOnEdge(nEdges)) - allocate(nEdgesOnVertex(nVertices)) - allocate(nCellsOnVertex(nVertices)) - allocate(cursor(nEdges)) - - - ! - ! Determine lat/lon for each cell - ! - do i=1,nCells - latCell(i) = rlat(i) - lonCell(i) = rlon(i) - end do - - - ! - ! Determine lat/lon for each vertex - ! - do i=1,nVertices - latVertex(i) = vclat(i) - lonVertex(i) = vclon(i) - end do - - - ! - ! Construct a list of edges (actually, a BST) - ! - nullify(treeRoot) - do i=1,nCells - do j = clist % start(i), clist % start(i) + clist % len(i) - 2 - js = j+1 - call insert_edge_to_tree(i, & - clist % neighbor(j), clist % neighbor(js), & - vclat(clist % neighbor(j)), vclon(clist % neighbor(j)), & - vclat(clist % neighbor(js)), vclon(clist % neighbor(js)), & - treeRoot) - end do - - js = clist % start(i) - call insert_edge_to_tree(i, & - clist % neighbor(j), clist % neighbor(js), & - vclat(clist % neighbor(j)), vclon(clist % neighbor(j)), & - vclat(clist % neighbor(js)), vclon(clist % neighbor(js)), & - treeRoot) - end do - - - ! - ! Determine which cells are on each edge, and lat/lon of edge endpoints - ! - i = 1 - j = 1 - cursor(i) % p => treeRoot - cursor(i) % stage = 0 - do while (i > 0) - if (associated(cursor(i) % p % left) .and. cursor(i) % stage < 1) then - cursor(i) % stage = 1 - i = i + 1 - cursor(i) % p => cursor(i-1) % p % left - cursor(i) % stage = 0 - cycle - end if - if (cursor(i) % stage < 2) then - cellsOnEdge(1,j) = cursor(i) % p % node1 - cellsOnEdge(2,j) = cursor(i) % p % node2 - lat1Edge(j) = cursor(i) % p % lat1 - lon1Edge(j) = cursor(i) % p % lon1 - lat2Edge(j) = cursor(i) % p % lat2 - lon2Edge(j) = cursor(i) % p % lon2 - verticesOnEdge(1,j) = cursor(i) % p % vertex1 - verticesOnEdge(2,j) = cursor(i) % p % vertex2 - j = j + 1 - cursor(i) % stage = 2 - end if - if (associated(cursor(i) % p % right) .and. cursor(i) % stage < 3) then - cursor(i) % stage = 3 - i = i + 1 - cursor(i) % p => cursor(i-1) % p % right - cursor(i) % stage = 0 - cycle - end if - i = i - 1 - end do - - - ! - ! Compute distance between cell centroids for each edge - ! 
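The vertex count just derived is Euler's formula for a polyhedron, V - E + F = 2: with nCells faces and nEdges = nNeighbors/2 edges, nVertices = nEdges - nCells + 2. Because every corner of a spherical Voronoi mesh is shared by exactly three cells and three edges, the counts must also satisfy 2*nEdges = 3*nVertices. A small consistency check in Python (illustrative only):

    def mesh_counts(n_cells: int, n_neighbors_total: int):
        # n_neighbors_total is alist%nNeighbors; each edge is seen from both of its cells
        n_edges = n_neighbors_total // 2
        n_vertices = n_edges - n_cells + 2      # Euler: V - E + F = 2
        assert 2 * n_edges == 3 * n_vertices    # every Voronoi corner has degree 3
        return n_edges, n_vertices

    # e.g. the 12-cell icosahedral dual (a dodecahedron): 30 edges, 20 vertices
    print(mesh_counts(12, 60))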
- do i=1,nEdges - cell1GP % lat = latCell(cellsOnEdge(1,i)) - cell1GP % lon = lonCell(cellsOnEdge(1,i)) - cell2GP % lat = latCell(cellsOnEdge(2,i)) - cell2GP % lon = lonCell(cellsOnEdge(2,i)) - dcEdge(i) = sphere_distance(cell1GP, cell2GP, 1.0) - end do - - - ! - ! Determine which edges are on each cell - ! - do i=1,nCells - nEdgesOnCell(i) = 0 - end do - - do j=1,nEdges - nEdgesOnCell(cellsOnEdge(1,j)) = nEdgesOnCell(cellsOnEdge(1,j)) + 1 - if (nEdgesOnCell(cellsOnEdge(1,j)) > maxEdges) then - write(0,*) 'Houston, we have an edge problem.' - stop - end if - edgesOnCell(nEdgesOnCell(cellsOnEdge(1,j)),cellsOnEdge(1,j)) = j - - nEdgesOnCell(cellsOnEdge(2,j)) = nEdgesOnCell(cellsOnEdge(2,j)) + 1 - if (nEdgesOnCell(cellsOnEdge(2,j)) > maxEdges) then - write(0,*) 'Houston, we have an edge problem.' - stop - end if - edgesOnCell(nEdgesOnCell(cellsOnEdge(2,j)),cellsOnEdge(2,j)) = j - end do - do i=1,nCells - do j=nEdgesOnCell(i)+1,maxEdges - edgesOnCell(j,i) = edgesOnCell(nEdgesOnCell(i),i) - end do - end do - - - ! - ! Determine which cells are on each cell - ! - do i=1,nCells - k = 1 - do j = alist % start(i), alist % start(i) + alist % len(i) - 1 - cellsOnCell(k,i) = alist % neighbor(j) - k = k + 1 - end do - do j=k,maxEdges - cellsOnCell(j,i) = alist % neighbor(alist % start(i) + alist % len(i) - 1) - end do - end do - - ! Re-order edgesOnCell to be in the same order as cellsOnCell - do i=1,nCells - do j=1,nEdgesOnCell(i) - if (cellsOnEdge(1,edgesOnCell(j,i)) /= cellsOnCell(j,i) .and. cellsOnEdge(2,edgesOnCell(j,i)) /= cellsOnCell(j,i)) then - ii = 0 - do k=j+1,nEdgesOnCell(i) - if (cellsOnEdge(1,edgesOnCell(k,i)) == cellsOnCell(j,i) .or. cellsOnEdge(2,edgesOnCell(k,i)) == cellsOnCell(j,i)) then - ! Swap edgesOnCell(k,i) and edgesOnCell(j,i) - ii = edgesOnCell(j,i) - edgesOnCell(j,i) = edgesOnCell(k,i) - edgesOnCell(k,i) = ii - exit - end if - end do -if (ii == 0) then - write(0,*) 'We didn''t find an edge to match cellsOnCell' - stop -end if - end if - end do - end do - - - ! - ! Compute lat/lon of each edge - ! - do j=1,nEdges - vertex1GP % lat = lat1Edge(j) - vertex1GP % lon = lon1Edge(j) - vertex2GP % lat = lat2Edge(j) - vertex2GP % lon = lon2Edge(j) - cell1GP % lat = latCell(cellsOnEdge(1,j)) - cell1GP % lon = lonCell(cellsOnEdge(1,j)) - cell2GP % lat = latCell(cellsOnEdge(2,j)) - cell2GP % lon = lonCell(cellsOnEdge(2,j)) - call compute_edge_latlon(cell1GP, cell2GP, vertex1GP, vertex2GP, edgeGP) - - dvEdge(j) = sphere_distance(vertex1GP, vertex2GP, 1.0) - rtmp = (vertex2GP % lat - vertex1GP % lat) / dvEdge(j) - if (rtmp > 1.0) rtmp = 1.0 - if (rtmp < -1.0) rtmp = -1.0 - rtmp = acos(rtmp) - angleEdge(j) = meridian_angle(edgeGP, vertex2GP) - angleEdge(j) = sign(rtmp, angleEdge(j)) - if (angleEdge(j) > pii) angleEdge(j) = angleEdge(j) - 2.0*pii - if (angleEdge(j) < -pii) angleEdge(j) = angleEdge(j) + 2.0*pii - latEdge(j) = edgeGP % lat - lonEdge(j) = edgeGP % lon - - ! Compute u for solid body rotation - u_sbr(j) = (sin(vertex2GP % lat) - sin(vertex1GP % lat)) / dvEdge(j) - - ! Make sure order of vertices on edge is correct -! 
if (is_flipped_vertex_order2(edgeGP, cell2GP, vertex2GP)) then - if (is_flipped_vertex_order(cell1GP % lat, cell1GP % lon, & - cell2GP % lat, cell2GP % lon, & - vertex1GP % lat, vertex1GP % lon, & - vertex2GP % lat, vertex2GP % lon)) then - temp = verticesOnEdge(1,j) - verticesOnEdge(1,j) = verticesOnEdge(2,j) - verticesOnEdge(2,j) = temp - u_sbr(j) = -1.0*u_sbr(j) - angleEdge(j) = angleEdge(j) + pii - if (angleEdge(j) > pii) angleEdge(j) = angleEdge(j) - 2.0*pii - if (angleEdge(j) < -pii) angleEdge(j) = angleEdge(j) + 2.0*pii - end if - - end do - - - ! - ! Determine which vertices are on each cell - ! - do i=1,nCells - if (i == cellsOnEdge(1,edgesOnCell(1,i))) then - verticesOnCell(1,i) = verticesOnEdge(1,edgesOnCell(1,i)) - else if (i == cellsOnEdge(2,edgesOnCell(1,i))) then - verticesOnCell(1,i) = verticesOnEdge(2,edgesOnCell(1,i)) - else -write(0,*) 'THIS EDGE SHOULDN''T BELONG TO THIS CELL' - end if - do j=1,nEdgesOnCell(i)-1 - if (verticesOnEdge(1,edgesOnCell(j,i)) == verticesOnCell(j,i)) then - verticesOnCell(j+1,i) = verticesOnEdge(2,edgesOnCell(j,i)) - else if (verticesOnEdge(2,edgesOnCell(j,i)) == verticesOnCell(j,i)) then - verticesOnCell(j+1,i) = verticesOnEdge(1,edgesOnCell(j,i)) - else -write(0,*) 'Broken chain of vertex-edge-vertex.' -stop - end if - end do - end do - - - ! - ! Determine which edges are incident with each vertex - ! - nEdgesOnVertex(:) = 0 - do j=1,nEdges - nEdgesOnVertex(verticesOnEdge(1,j)) = nEdgesOnVertex(verticesOnEdge(1,j)) + 1 - if (nEdgesOnVertex(verticesOnEdge(1,j)) > 3) then - write(0,*) 'We have too many edges incident with vertex ',verticesOnEdge(1,j) - stop - end if - edgesOnVertex(nEdgesOnVertex(verticesOnEdge(1,j)),verticesOnEdge(1,j)) = j - - nEdgesOnVertex(verticesOnEdge(2,j)) = nEdgesOnVertex(verticesOnEdge(2,j)) + 1 - if (nEdgesOnVertex(verticesOnEdge(2,j)) > 3) then - write(0,*) 'We have too many edges incident with vertex ',verticesOnEdge(2,j) - stop - end if - edgesOnVertex(nEdgesOnVertex(verticesOnEdge(2,j)),verticesOnEdge(2,j)) = j - end do - - - ! - ! Determine which cells share each vertex - ! - nCellsOnVertex(:) = 0 - do i=1,nCells - do j=1,nEdgesOnCell(i) - nCellsOnVertex(verticesOnCell(j,i)) = nCellsOnVertex(verticesOnCell(j,i)) + 1 - if (nCellsOnVertex(verticesOnCell(j,i)) > 3) then - write(0,*) 'We have too many cells sharing vertex ', verticesOnCell(j,i) - stop - end if - cellsOnVertex(nCellsOnVertex(verticesOnCell(j,i)),verticesOnCell(j,i)) = i - end do - end do - - - ! - ! Determine which edges "neighbor" each edge - ! 
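The loops above invert the edge-based connectivity into vertex-based connectivity (edgesOnVertex, cellsOnVertex) and stop with an error if any vertex picks up more than three incident edges or cells, which cannot happen in a valid SCVT. The same inversion can be sketched in Python, assuming verticesOnEdge is a list of (v1, v2) pairs indexed by edge:

    from collections import defaultdict

    def edges_on_vertex(vertices_on_edge):
        # invert edge -> vertex connectivity into vertex -> edge connectivity
        incident = defaultdict(list)
        for edge, (v1, v2) in enumerate(vertices_on_edge):
            incident[v1].append(edge)
            incident[v2].append(edge)
        # every corner of a spherical Voronoi mesh has exactly three incident edges
        assert all(len(edges) == 3 for edges in incident.values())
        return dict(incident)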
- do j=1,nEdges - allocate(points(nEdgesOnCell(cellsOnEdge(1,j)))) - allocate(permutation(nEdgesOnCell(cellsOnEdge(1,j)))) - js = 1 - points(js) % lat = latEdge(j) - points(js) % lon = lonEdge(j) - permutation(js) = j - js = js + 1 - center % lat = latCell(cellsOnEdge(1,j)) - center % lon = lonCell(cellsOnEdge(1,j)) - - do k=1,nEdgesOnCell(cellsOnEdge(1,j)) - if (edgesOnCell(k,cellsOnEdge(1,j)) /= j) then - nEdgesOnEdge(j) = nEdgesOnEdge(j) + 1 - edgesOnEdge(nEdgesOnEdge(j),j) = edgesOnCell(k,cellsOnEdge(1,j)) - points(js) % lat = latEdge(edgesOnCell(k,cellsOnEdge(1,j))) - points(js) % lon = lonEdge(edgesOnCell(k,cellsOnEdge(1,j))) - permutation(js) = edgesOnCell(k,cellsOnEdge(1,j)) - js = js + 1 - end if - end do - call order_points_ccw(center, nEdgesOnCell(cellsOnEdge(1,j)), points, permutation) - do k=2,nEdgesOnCell(cellsOnEdge(1,j)) - edgesOnEdge(k-1,j) = permutation(k) - end do - deallocate(points) - deallocate(permutation) - - allocate(points(nEdgesOnCell(cellsOnEdge(2,j)))) - allocate(permutation(nEdgesOnCell(cellsOnEdge(2,j)))) - js = 1 - points(js) % lat = latEdge(j) - points(js) % lon = lonEdge(j) - permutation(js) = j - js = js + 1 - center % lat = latCell(cellsOnEdge(2,j)) - center % lon = lonCell(cellsOnEdge(2,j)) - - do k=1,nEdgesOnCell(cellsOnEdge(2,j)) - if (edgesOnCell(k,cellsOnEdge(2,j)) /= j) then - nEdgesOnEdge(j) = nEdgesOnEdge(j) + 1 - edgesOnEdge(nEdgesOnEdge(j),j) = edgesOnCell(k,cellsOnEdge(2,j)) - points(js) % lat = latEdge(edgesOnCell(k,cellsOnEdge(2,j))) - points(js) % lon = lonEdge(edgesOnCell(k,cellsOnEdge(2,j))) - permutation(js) = edgesOnCell(k,cellsOnEdge(2,j)) - js = js + 1 - end if - end do - call order_points_ccw(center, nEdgesOnCell(cellsOnEdge(2,j)), points, permutation) - do k=2,nEdgesOnCell(cellsOnEdge(2,j)) - edgesOnEdge(nEdgesOnCell(cellsOnEdge(1,j))+k-2,j) = permutation(k) - end do - deallocate(points) - deallocate(permutation) - end do - - - ! - ! Compute area of each cell - ! - do i=1,nCells - cell1GP % lat = latCell(i) - cell1GP % lon = lonCell(i) - areaCell(i) = 0.0 - do j=1,nEdgesOnCell(i)-1 - vertex1GP % lat = latVertex(verticesOnCell(j,i)) - vertex1GP % lon = lonVertex(verticesOnCell(j,i)) - vertex2GP % lat = latVertex(verticesOnCell(j+1,i)) - vertex2GP % lon = lonVertex(verticesOnCell(j+1,i)) - areaCell(i) = areaCell(i) + triangle_area(cell1GP, vertex1GP, vertex2GP, 1.0) - end do - vertex1GP % lat = latVertex(verticesOnCell(j,i)) - vertex1GP % lon = lonVertex(verticesOnCell(j,i)) - vertex2GP % lat = latVertex(verticesOnCell(1,i)) - vertex2GP % lon = lonVertex(verticesOnCell(1,i)) - areaCell(i) = areaCell(i) + triangle_area(cell1GP, vertex1GP, vertex2GP, 1.0) - end do - - - ! - ! Compute area of triangles associated with each vertex - ! - do i=1,nVertices - cell1GP % lat = latCell(cellsOnVertex(1,i)) - cell1GP % lon = lonCell(cellsOnVertex(1,i)) - cell2GP % lat = latCell(cellsOnVertex(2,i)) - cell2GP % lon = lonCell(cellsOnVertex(2,i)) - cell3GP % lat = latCell(cellsOnVertex(3,i)) - cell3GP % lon = lonCell(cellsOnVertex(3,i)) - areaTriangle(i) = triangle_area(cell1GP, cell2GP, cell3GP, 1.0) - end do - - ! - ! Test to see if any of the triangles are obtuse - ! 
- nObtuse = 0 - do i=1,nVertices - cell1GP % lat = latCell(cellsOnVertex(1,i)) - cell1GP % lon = lonCell(cellsOnVertex(1,i)) - cell2GP % lat = latCell(cellsOnVertex(2,i)) - cell2GP % lon = lonCell(cellsOnVertex(2,i)) - cell3GP % lat = latCell(cellsOnVertex(3,i)) - cell3GP % lon = lonCell(cellsOnVertex(3,i)) - isObtuse(i) = obtuse(cell1GP, cell2GP, cell3GP) - if(isObtuse(i).gt.0) nObtuse = nObtuse + 1 - end do - write(6,*) ' number of obtuse triangles ', nObtuse - - - kiteAreasOnVertex(:,:) = -1.0 - - ! - ! Compute weights used in tangential velocity reconstruction - ! - do j=1,nEdges - cell1GP % lat = latCell(cellsOnEdge(1,j)) - cell1GP % lon = lonCell(cellsOnEdge(1,j)) - cell2GP % lat = latCell(cellsOnEdge(2,j)) - cell2GP % lon = lonCell(cellsOnEdge(2,j)) - de = dcEdge(j) - prev_edge = j - sum_r = 0.0 - do i=1,nEdgesOnCell(cellsOnEdge(1,j))-1 - next_edge = edgesOnEdge(i,j) - if ((verticesOnEdge(1,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(1,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(1,prev_edge) - else if ((verticesOnEdge(2,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(2,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(2,prev_edge) - else - write(0,*) 'Somehow these edges don''t share a vertex',j - write(0,*) 'Cells are ',cellsOnEdge(1,j),cellsOnEdge(2,j) - write(0,*) 'Edge ',prev_edge,' has vertices ',verticesOnEdge(1,prev_edge),verticesOnEdge(2,prev_edge) - write(0,*) 'Edge ',next_edge,' has vertices ',verticesOnEdge(1,next_edge),verticesOnEdge(2,next_edge) - write(0,*) 'edgesOnEdge()=',edgesOnEdge(:,j) - write(0,*) 'verticesOnCell(1) = ',verticesOnCell(:,cellsOnEdge(1,j)) - write(0,*) 'edgesOnCell(1) = ',edgesOnCell(:,cellsOnEdge(1,j)) - write(0,*) 'verticesOnEdge(12821)=',verticesOnEdge(:,12821) - write(0,*) 'verticesOnEdge(12823)=',verticesOnEdge(:,12823) - write(0,*) 'verticesOnEdge(13243)=',verticesOnEdge(:,13243) - write(0,*) 'verticesOnEdge(13245)=',verticesOnEdge(:,13245) - write(0,*) 'verticesOnEdge(13448)=',verticesOnEdge(:,13448) - write(0,*) 'verticesOnEdge(13449)=',verticesOnEdge(:,13449) - stop - end if - cell1GP % lat = latCell(cellsOnEdge(1,j)) - cell1GP % lon = lonCell(cellsOnEdge(1,j)) - vertex1GP % lat = latVertex(vtx) - vertex1GP % lon = lonVertex(vtx) - edgeGP_prev % lat = latEdge(prev_edge) - edgeGP_prev % lon = lonEdge(prev_edge) - edgeGP_next % lat = latEdge(next_edge) - edgeGP_next % lon = lonEdge(next_edge) - - if(isObtuse(vtx).eq.0) then - area = abs(triangle_area(cell1GP, vertex1GP, edgeGP_prev, 1.0)) - area = area + abs(triangle_area(cell1GP, vertex1GP, edgeGP_next, 1.0)) - else - if(cellsOnVertex(isObtuse(vtx),vtx).eq.cellsOnEdge(1,j)) then - iFlag = 0 - do ii=1,3 - jj = edgesOnVertex(ii,vtx) - if(jj.ne.prev_edge.and.jj.ne.next_edge) then - write(6,*) jj, prev_edge, next_edge - if(iFlag.eq.1) then - write(6,*) ' can not be true ' - stop - endif - iEdge = jj - iFlag = 1 - endif - enddo - edgeGP % lat = latEdge(iEdge) - edgeGP % lon = lonEdge(iEdge) - area = abs(triangle_area(cell1GP, edgeGP, edgeGP_prev, 1.0)) - area = area + abs(triangle_area(cell1GP, edgeGP, edgeGP_next, 1.0)) - else - area = abs(triangle_area(cell1GP, edgeGP_prev, edgeGP_next, 1.0)) - endif - endif - - do ii=1,3 - if (cellsOnEdge(1,j) == cellsOnVertex(ii,vtx)) then - kiteAreasOnVertex(ii,vtx) = area - exit - end if - end do - - if(.not.tdrtest) then - write(6,*) ' not tdrtest' - r = area / areaCell(cellsOnEdge(1,j)) - sum_r = sum_r + r - if (cellsOnEdge(1,j) == 
cellsOnEdge(1,edgesOnEdge(i,j))) then - s = 1.0 - else - s = -1.0 - end if - weightsOnEdge(i,j) = s*(0.5-sum_r)*dvEdge(edgesOnEdge(i,j))/de - endif - - prev_edge = next_edge - end do - - prev_edge = j - sum_r = 0.0 - do i=nEdgesOnCell(cellsOnEdge(1,j)),nEdgesOnEdge(j) - next_edge = edgesOnEdge(i,j) - if ((verticesOnEdge(1,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(1,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(1,prev_edge) - else if ((verticesOnEdge(2,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(2,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(2,prev_edge) - else - write(0,*) 'Somehow these edges don''t share a vertex' - write(0,*) 'Edge ',prev_edge,' has vertices ',verticesOnEdge(1,prev_edge),verticesOnEdge(2,prev_edge) - write(0,*) 'Edge ',next_edge,' has vertices ',verticesOnEdge(1,next_edge),verticesOnEdge(2,next_edge) - write(0,*) 'VerticesOnCell(2) = ',verticesOnCell(:,cellsOnEdge(2,j)) - write(0,*) 'edgesOnCell(2) = ',edgesOnCell(:,cellsOnEdge(2,j)) - stop - end if - cell1GP % lat = latCell(cellsOnEdge(2,j)) - cell1GP % lon = lonCell(cellsOnEdge(2,j)) - vertex1GP % lat = latVertex(vtx) - vertex1GP % lon = lonVertex(vtx) - - edgeGP_prev % lat = latEdge(prev_edge) - edgeGP_prev % lon = lonEdge(prev_edge) - edgeGP_next % lat = latEdge(next_edge) - edgeGP_next % lon = lonEdge(next_edge) - - if(isObtuse(vtx).eq.0) then - area = abs(triangle_area(cell1GP, vertex1GP, edgeGP_prev, 1.0)) - area = area + abs(triangle_area(cell1GP, vertex1GP, edgeGP_next, 1.0)) - else - if(cellsOnVertex(isObtuse(vtx),vtx).eq.cellsOnEdge(2,j)) then - iFlag = 0 - do ii=1,3 - jj = edgesOnVertex(ii,vtx) - if(jj.ne.prev_edge.and.jj.ne.next_edge) then - write(6,*) jj, prev_edge, next_edge - if(iFlag.eq.1) then - write(6,*) ' can not be true ' - stop - endif - iEdge = jj - iFlag = 1 - endif - enddo - edgeGP % lat = latEdge(iEdge) - edgeGP % lon = lonEdge(iEdge) - area = abs(triangle_area(cell1GP, edgeGP, edgeGP_prev, 1.0)) - area = area + abs(triangle_area(cell1GP, edgeGP, edgeGP_next, 1.0)) - else - area = abs(triangle_area(cell1GP, edgeGP_prev, edgeGP_next, 1.0)) - endif - endif - - do ii=1,3 - if (cellsOnEdge(2,j) == cellsOnVertex(ii,vtx)) then - kiteAreasOnVertex(ii,vtx) = area - exit - end if - end do - - if(.not.tdrtest) then - write(6,*) ' not tdrtest' - r = area / areaCell(cellsOnEdge(2,j)) - sum_r = sum_r + r - if (cellsOnEdge(2,j) == cellsOnEdge(1,edgesOnEdge(i,j))) then - s = -1.0 - else - s = 1.0 - end if - weightsOnEdge(i,j) = s*(0.5-sum_r)*dvEdge(edgesOnEdge(i,j))/de - endif - - prev_edge = next_edge - end do - end do - -!---- - - if(tdrtest) then - - write(6,*) ' testing tdr ' - - areaTriangle = 0.0 - areaCell = 0.0 - do i=1,nVertices - do ii=1,3 - jj = cellsOnVertex(ii,i) - areaCell(jj) = areaCell(jj) + kiteAreasOnVertex(ii,i) - areaTriangle(i) = areaTriangle(i) + kiteAreasOnVertex(ii,i) - enddo - end do - - do j=1,nEdges - de = dcEdge(j) - prev_edge = j - sum_r = 0.0 - do i=1,nEdgesOnCell(cellsOnEdge(1,j))-1 - next_edge = edgesOnEdge(i,j) - if ((verticesOnEdge(1,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(1,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(1,prev_edge) - else if ((verticesOnEdge(2,prev_edge) == verticesOnEdge(1,next_edge)) .or. 
& - (verticesOnEdge(2,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(2,prev_edge) - else - stop - end if - - do ii=1,3 - if(cellsOnVertex(ii,vtx).eq.cellsOnEdge(1,j)) then - area = kiteAreasOnVertex(ii,vtx) - exit - endif - enddo - - r = area / areaCell(cellsOnEdge(1,j)) - sum_r = sum_r + r - if (cellsOnEdge(1,j) == cellsOnEdge(1,edgesOnEdge(i,j))) then - s = 1.0 - else - s = -1.0 - end if - weightsOnEdge(i,j) = s*(0.5-sum_r)*dvEdge(edgesOnEdge(i,j))/de - prev_edge = next_edge - end do - - prev_edge = j - sum_r = 0.0 - do i=nEdgesOnCell(cellsOnEdge(1,j)),nEdgesOnEdge(j) - next_edge = edgesOnEdge(i,j) - if ((verticesOnEdge(1,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(1,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(1,prev_edge) - else if ((verticesOnEdge(2,prev_edge) == verticesOnEdge(1,next_edge)) .or. & - (verticesOnEdge(2,prev_edge) == verticesOnEdge(2,next_edge))) then - vtx = verticesOnEdge(2,prev_edge) - else - stop - end if - - do ii=1,3 - if(cellsOnVertex(ii,vtx).eq.cellsOnEdge(2,j)) then - area = kiteAreasOnVertex(ii,vtx) - exit - endif - enddo - - r = area / areaCell(cellsOnEdge(2,j)) - sum_r = sum_r + r - if (cellsOnEdge(2,j) == cellsOnEdge(1,edgesOnEdge(i,j))) then - s = -1.0 - else - s = 1.0 - end if - weightsOnEdge(i,j) = s*(0.5-sum_r)*dvEdge(edgesOnEdge(i,j))/de - prev_edge = next_edge - end do - end do - - write(6,*) ' areas ', minval(areaCell), maxval(areaCell), maxval(areaCell)/minval(areaCell) - write(6,*) ' dcEdge ', minval(dcEdge), maxval(dcEdge), maxval(dcEdge)/minval(dcEdge) - - endif ! tdrtest - -!----- - - do i=1,nEdges - edgeGP % lat = latEdge(i) - edgeGP % lon = lonEdge(i) - vertex1GP % lat = latVertex(verticesOnEdge(1,i)) - vertex1GP % lon = lonVertex(verticesOnEdge(1,i)) - vertex2GP % lat = latVertex(verticesOnEdge(2,i)) - vertex2GP % lon = lonVertex(verticesOnEdge(2,i)) - dv1Edge(i) = sphere_distance(edgeGP, vertex1GP, 1.0) - dv2Edge(i) = sphere_distance(edgeGP, vertex2GP, 1.0) - end do - - - write(0,*) 'There are ', nCells, ' cells and ', nEdges, ' edges' - write(0,*) ' ' - write(0,*) 'Total area of all cells is ',sum(areaCell(:)) - write(0,*) 'Total triangle area=',sum(areaTriangle(:)) - write(0,*) 'Total kite area=',sum(kiteAreasOnVertex(:,:)) - - - do i=1,nCells - cell1GP % lat = latCell(i) - cell1GP % lon = lonCell(i) - call convert_lx(xCell(i), yCell(i), zCell(i), 1.0, cell1GP) - end do - do i=1,nVertices - vertex1GP % lat = latVertex(i) - vertex1GP % lon = lonVertex(i) - call convert_lx(xVertex(i), yVertex(i), zVertex(i), 1.0, vertex1GP) - end do - do i=1,nEdges - edgeGP % lat = latEdge(i) - edgeGP % lon = lonEdge(i) - call convert_lx(xEdge(i), yEdge(i), zEdge(i), 1.0, edgeGP) - end do - - - nVertLevels = 1 - nTracers = 1 - - allocate(indexToCellID(nCells)) - allocate(indexToEdgeID(nEdges)) - allocate(indexToVertexID(nVertices)) - allocate(fEdge(nEdges)) - allocate(fVertex(nVertices)) - allocate(h_s(nCells)) - allocate(u(nVertLevels,nEdges,1)) - allocate(v(nVertLevels,nEdges,1)) - allocate(h(nVertLevels,nCells,1)) - allocate(vh(nVertLevels,nEdges,1)) - allocate(circulation(nVertLevels,nVertices,1)) - allocate(vorticity(nVertLevels,nVertices,1)) - allocate(ke(nVertLevels,nCells,1)) - allocate(tracers(nTracers,nVertLevels,nCells,1)) - - u(1,:,1) = u_sbr(:) - h(:,:,1) = 1000.0 - v(:,:,1) = 0.0 - vh(:,:,1) = 0.0 - circulation(:,:,1) = 0.0 - vorticity(:,:,1) = 0.0 - ke(:,:,1) = 0.0 - tracers(:,:,:,1) = 0.0 - - h_s(:) = 0.0 - do i=1,nCells - indexToCellID(i) = i - end do - do i=1,nEdges 
- fEdge(i) = 2.0 * 7.292E-5 * sin(latEdge(i)) - indexToEdgeID(i) = i - end do - do i=1,nVertices - fVertex(i) = 2.0 * 7.292E-5 * sin(latVertex(i)) - indexToVertexID(i) = i - end do - - do i=1,nCells - pCell%lat = latCell(i) - pCell%lon = lonCell(i) - if(pCell%lon.gt. pii) pCell%lon=pCell%lon-2.0*pii - if(pCell%lon.ge.-pii) pCell%lon=pCell%lon+2.0*pii - meshDensity(i) = density_for_point(pCell) - enddo - - call write_netcdf_init( & - nCells, & - nEdges, & - nVertices, & - maxEdges, & - nVertLevels, & - nTracers & - ) - - call write_netcdf_fields( & - 1, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dv1Edge, & - dv2Edge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - h_s, & - u, & - v, & - h, & - vh, & - circulation, & - vorticity, & - ke, & - tracers & - ) - - call write_netcdf_finalize() - - - ! - ! Write out a file compatible with metis for block decomposition - ! - open(42,file='graph.info',form='formatted') - write(42,*) nCells, nEdges - do i=1,nCells - do j=1,nEdgesOnCell(i) - write(42,'(1x,i8)',advance='no') cellsOnCell(j,i) - end do - write(42,'(1x)') - end do - close(42) - - - ! - ! The following information is written out to permit further refinement - ! of a grid using the grid_gen code - ! - open(22,file='locs.dat.out',form='formatted') - write(22,*) nCells,nEdges - do i=1,nCells - write(22,'(10x,f22.10,f23.10,f23.10)') xCell(i), yCell(i), zCell(i) - end do - do i=1,nEdges - write(22,'(10x,f22.10,f23.10,f23.10)') xEdge(i), yEdge(i), zEdge(i) - end do - ! do i=1,nCells - ! write(22,'(f13.10,1x,f13.10)') latCell(i), lonCell(i) - ! end do - ! do i=1,nEdges - ! write(22,'(f13.10,1x,f13.10)') latEdge(i), lonEdge(i) - ! end do - close(22) - - ! - ! Write out data for visualization in OpenDx - ! 
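The graph.info file written above follows the plain METIS graph-file convention: a header line giving the number of vertices (cells) and edges, then one line per cell listing the 1-based indices of its neighbors, so the mesh can be partitioned (for example with gpmetis) for block decomposition. For a toy mesh of four mutually adjacent cells the file would read:

    4 6
    2 3 4
    1 3 4
    1 2 4
    1 2 3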
- call write_OpenDX( nCells, & - nVertices, & - xCell, & - yCell, & - zCell, & - xVertex, & - yVertex, & - zVertex, & - nEdgesOnCell, & - verticesOnCell, & - areaCell ) - - - deallocate(indexToCellID) - deallocate(indexToEdgeID) - deallocate(indexToVertexID) - deallocate(fEdge) - deallocate(fVertex) - deallocate(h_s) - deallocate(u) - deallocate(v) - deallocate(h) - deallocate(vh) - deallocate(circulation) - deallocate(vorticity) - deallocate(ke) - deallocate(tracers) - - - deallocate(cursor) - deallocate(latCell) - deallocate(lonCell) - deallocate(xCell) - deallocate(yCell) - deallocate(zCell) - deallocate(latEdge) - deallocate(lonEdge) - deallocate(xEdge) - deallocate(yEdge) - deallocate(zEdge) - deallocate(dvEdge) - deallocate(dv1Edge) - deallocate(dv2Edge) - deallocate(dcEdge) - deallocate(areaCell) - deallocate(areaTriangle) - deallocate(angleEdge) - deallocate(u_sbr) - deallocate(latVertex) - deallocate(lonVertex) - deallocate(xVertex) - deallocate(yVertex) - deallocate(zVertex) - deallocate(lat1Edge) - deallocate(lon1Edge) - deallocate(lat2Edge) - deallocate(lon2Edge) - deallocate(cellsOnEdge) - deallocate(edgesOnCell) - deallocate(verticesOnCell) - deallocate(verticesOnEdge) - deallocate(edgesOnEdge) - deallocate(edgesOnVertex) - deallocate(cellsOnVertex) - deallocate(tempEdgesOnEdge) - deallocate(weightsOnEdge) - deallocate(kiteAreasOnVertex) - deallocate(cellsOnCell) - deallocate(nEdgesOnCell) - deallocate(nEdgesOnEdge) - deallocate(nEdgesOnVertex) - deallocate(nCellsOnVertex) - - end subroutine write_grid - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE INSERT_EDGE_TO_TREE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine insert_edge_to_tree(cellID, vertex1ID, vertex2ID, lat1, lon1, lat2, lon2, root) - - use data_types - - implicit none - - integer, intent(in) :: cellID - integer, intent(in) :: vertex1ID, vertex2ID - real, intent(in) :: lat1, lon1, lat2, lon2 - type (binary_tree), pointer :: root - - real :: tLat1, tLat2, tLon1, tLon2 - integer :: tID1, tID2 - logical :: found - type (binary_tree), pointer :: pre_cursor, cursor - - if (point_compare(lat1, lon1, lat2, lon2) > 0) then - tLat1 = lat1 - tLon1 = lon1 - tLat2 = lat2 - tLon2 = lon2 - tID1 = vertex1ID - tID2 = vertex2ID - else - tLat1 = lat2 - tLon1 = lon2 - tLat2 = lat1 - tLon2 = lon1 - tID1 = vertex2ID - tID2 = vertex1ID - end if - - if (.not. associated(root)) then - allocate(root) - root % lat1 = tLat1 - root % lon1 = tLon1 - root % lat2 = tLat2 - root % lon2 = tLon2 - root % vertex1 = tID1 - root % vertex2 = tID2 - root % node1 = cellID - nullify(root % left) - nullify(root % right) - nullify(root % parent) - - else - - found = .false. - cursor => root - do while (associated(cursor) .and. .not. found) - if (edge_compare(tLat1, tLon1, tLat2, tLon2, cursor % lat1, cursor % lon1, cursor % lat2, cursor % lon2) == 0) then - found = .true. - else if (edge_compare(tLat1, tLon1, tLat2, tLon2, cursor % lat1, cursor % lon1, cursor % lat2, cursor % lon2) > 0) then - pre_cursor => cursor - cursor => cursor % right - else - pre_cursor => cursor - cursor => cursor % left - end if - end do - - if (.not. 
found) then - if (edge_compare(tLat1, tLon1, tLat2, tLon2, pre_cursor % lat1, pre_cursor % lon1, pre_cursor % lat2, pre_cursor % lon2) > 0) then - allocate(pre_cursor % right) - cursor => pre_cursor % right - else - allocate(pre_cursor % left) - cursor => pre_cursor % left - end if - cursor % lat1 = tLat1 - cursor % lon1 = tLon1 - cursor % lat2 = tLat2 - cursor % lon2 = tLon2 - cursor % vertex1 = tID1 - cursor % vertex2 = tID2 - cursor % node1 = cellID - cursor % parent => pre_cursor - nullify(cursor % left) - nullify(cursor % right) - else - cursor % node2 = cellID - end if - - end if - - end subroutine insert_edge_to_tree - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! FUNCTION POINT_COMPARE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - integer function point_compare(lat1, lon1, lat2, lon2) - - implicit none - - real, intent(in) :: lat1, lon1, lat2, lon2 - - point_compare = -1 - - if (lat1 > lat2) then - point_compare = 1 - else if (lat1 == lat2) then - if (lon1 > lon2) then - point_compare = 1 - else if (lon1 == lon2) then - point_compare = 0 - end if - end if - - end function point_compare - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! FUNCTION EDGE_COMPARE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - integer function edge_compare(aLat1, aLon1, aLat2, aLon2, bLat1, bLon1, bLat2, bLon2) - - implicit none - - real, intent(in) :: aLat1, aLon1, aLat2, aLon2, bLat1, bLon1, bLat2, bLon2 - - edge_compare = -1 - - if (point_compare(aLat1,aLon1,bLat1,bLon1) > 0) then - edge_compare = 1 - else if (point_compare(aLat1,aLon1,bLat1,bLon1) == 0) then - if (point_compare(aLat2,aLon2,bLat2,bLon2) > 0) then - edge_compare = 1 - else if (point_compare(aLat2,aLon2,bLat2,bLon2) == 0) then - edge_compare = 0 - end if - end if - - end function edge_compare - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_EDGE_LATLON - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine compute_edge_latlon(cell1, cell2, vertex1, vertex2, edge) - - use data_types - use sphere_utilities - - implicit none - - type (geo_point), intent(in) :: cell1, cell2, vertex1, vertex2 - type (geo_point), intent(out) :: edge - - call gc_intersect(cell1, cell2, vertex1, vertex2, edge) - - end subroutine compute_edge_latlon - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE is_flipped_vertex_order - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
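insert_edge_to_tree above keeps the edges in a binary search tree keyed by the lexicographic order of the two endpoint coordinates (point_compare / edge_compare); each edge is inserted once from each of the two cells that share it, so the first insertion records node1 and the second fills in node2. The same cell pairing can be sketched in Python with a dictionary keyed by the sorted vertex pair (an illustrative alternative, not the data structure the Fortran uses):

    def pair_cells_on_edges(corner_loops):
        # corner_loops: {cell_id: [v0, v1, ..., vk]}, the Voronoi corner loop of each cell
        cells_on_edge = {}
        for cell, loop in corner_loops.items():
            for a, b in zip(loop, loop[1:] + loop[:1]):        # consecutive corners, closed
                key = (min(a, b), max(a, b))                   # order-independent edge key
                cells_on_edge.setdefault(key, []).append(cell) # first cell, then second
        return cells_on_edge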
- logical function is_flipped_vertex_order(latCell1, lonCell1, & - latCell2, lonCell2, & - latVertex1, lonVertex1, & - latVertex2, lonVertex2) - - use data_types - use sphere_utilities - - implicit none - - real, intent(in) :: latCell1, lonCell1, & - latCell2, lonCell2, & - latVertex1, lonVertex1, & - latVertex2, lonVertex2 - - real :: xCell1, yCell1, zCell1 - real :: xCell2, yCell2, zCell2 - real :: xVertex1, yVertex1, zVertex1 - real :: xVertex2, yVertex2, zVertex2 - real :: xV1, yV1, zV1 - real :: xV2, yV2, zV2 - real :: ci, cj, ck - type (geo_point) :: cell1, cell2, vertex1, vertex2 - - cell1 % lat = latCell1 - cell1 % lon = lonCell1 - cell2 % lat = latCell2 - cell2 % lon = lonCell2 - vertex1 % lat = latVertex1 - vertex1 % lon = lonVertex1 - vertex2 % lat = latVertex2 - vertex2 % lon = lonVertex2 - - call convert_lx(xCell1, yCell1, zCell1, 1.0, cell1) - call convert_lx(xCell2, yCell2, zCell2, 1.0, cell2) - call convert_lx(xVertex1, yVertex1, zVertex1, 1.0, vertex1) - call convert_lx(xVertex2, yVertex2, zVertex2, 1.0, vertex2) - - xV1 = xCell2 - xCell1 - yV1 = yCell2 - yCell1 - zV1 = zCell2 - zCell1 - xV2 = xVertex2 - xVertex1 - yV2 = yVertex2 - yVertex1 - zV2 = zVertex2 - zVertex1 - - ci = yV1*zV2 - zV1*yV2 - cj = zV1*xV2 - xV1*zV2 - ck = xV1*yV2 - yV1*xV2 - - if ((ci*xCell1 + cj*yCell1 + ck*zCell1) >= 0.0) then - is_flipped_vertex_order = .false. - else - is_flipped_vertex_order = .true. - end if - - end function is_flipped_vertex_order - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE is_flipped_vertex_order2 - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - logical function is_flipped_vertex_order2(edge, cell2, vertex2) - - use data_types - use sphere_utilities - use grid_constants - - implicit none - - type (geo_point), intent(in) :: edge, cell2, vertex2 - - real :: xEdge, yEdge, zEdge - real :: xCell2, yCell2, zCell2 - real :: xVertex2, yVertex2, zVertex2 - real :: angle - - call convert_lx(xEdge, yEdge, zEdge, 1.0, edge) - call convert_lx(xCell2, yCell2, zCell2, 1.0, cell2) - call convert_lx(xVertex2, yVertex2, zVertex2, 1.0, vertex2) - - angle = plane_angle(xEdge, yEdge, zEdge, & - xCell2, yCell2, zCell2, & - xVertex2, yVertex2, zVertex2, & - xEdge, yEdge, zEdge) - - if (angle > 0.0 .and. angle < pii) then - is_flipped_vertex_order2 = .false. - else - is_flipped_vertex_order2 = .true. - end if - - end function is_flipped_vertex_order2 - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE ORDER_POINTS_CCW - ! - ! Given a center around which the ordering should be done, the array of points - ! is re-ordered in CCW order, taking the first point in the array to be the - ! first point in the ordering, and the vector from the origin to center - ! as the normal vector of the suface containing the points at the center. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
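is_flipped_vertex_order above converts the two cell centers and the two edge vertices to Cartesian coordinates and checks the sign of ((cell2 - cell1) x (vertex2 - vertex1)) . cell1; a negative value means the stored vertex order is clockwise with respect to the cell1 -> cell2 direction, so the caller swaps the vertices. The same test with NumPy, assuming unit-sphere Cartesian coordinates:

    import numpy as np

    def is_flipped(cell1, cell2, vertex1, vertex2):
        # True if (vertex1, vertex2) is clockwise relative to the cell1 -> cell2 direction
        normal = np.cross(np.asarray(cell2) - np.asarray(cell1),
                          np.asarray(vertex2) - np.asarray(vertex1))
        return float(np.dot(normal, np.asarray(cell1))) < 0.0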
- subroutine order_points_ccw(center, npts, points, permutation) - - use data_types - use sphere_utilities - use grid_constants - - implicit none - - type (geo_point), intent(in) :: center - integer, intent(in) :: npts - integer, dimension(npts), intent(inout) :: permutation - type (geo_point), dimension(npts), intent(inout) :: points - - integer :: i, j - integer :: itemp - real :: rtemp - real :: nx, ny, nz - real :: px, py, pz - real :: p0x, p0y, p0z - real, dimension(npts) :: angle - type (geo_point) :: ptemp - - call convert_lx(nx, ny, nz, 1.0, center) - call convert_lx(p0x, p0y, p0z, 1.0, points(1)) - - angle(1) = 0.0 - - do i=2,npts - call convert_lx(px, py, pz, 1.0, points(i)) - angle(i) = plane_angle(nx, ny, nz, p0x, p0y, p0z, px, py, pz, nx, ny, nz) - if (angle(i) < 0.0) angle(i) = angle(i) + 2.0*pii - if (angle(i) > 2.0*pii) angle(i) = angle(i) - 2.0*pii - end do - - do i=2,npts - do j=i+1,npts - if (angle(j) < angle(i)) then - rtemp = angle(i) - angle(i) = angle(j) - angle(j) = rtemp - - itemp = permutation(i) - permutation(i) = permutation(j) - permutation(j) = itemp - - ptemp = points(i) - points(i) = points(j) - points(j) = ptemp - end if - end do - end do - - end subroutine order_points_ccw - - subroutine write_OpenDX( nCells, & - nVertices, & - xCell, & - yCell, & - zCell, & - xVertex, & - yVertex, & - zVertex, & - nEdgesOnCell, & - verticesOnCell, & - areaCell ) - - integer, intent(in) :: nCells - integer, intent(in) :: nVertices - real (kind=RKIND), dimension(:), intent(in) :: xCell - real (kind=RKIND), dimension(:), intent(in) :: yCell - real (kind=RKIND), dimension(:), intent(in) :: zCell - real (kind=RKIND), dimension(:), intent(in) :: xVertex - real (kind=RKIND), dimension(:), intent(in) :: yVertex - real (kind=RKIND), dimension(:), intent(in) :: zVertex - integer, dimension(:), intent(in) :: nEdgesOnCell - integer, dimension(:,:), intent(in) :: verticesOnCell - real (kind=RKIND), dimension(:), intent(in) :: areaCell - - character(len=80) :: a, b, c, d, e, f - integer :: i, j, k, nVerticesTotal, iEdge, iLoop - - nVerticesTotal = 0 - do i=1,nCells - nVerticesTotal = nVerticesTotal + nEdgesOnCell(i) - enddo - - open(unit=1,file='dx/voronoi.dx',form='formatted',status='unknown') - - a = trim('object "positions list" class array type float rank 1 shape 3 items') - b = trim('ascii data file vor.position.data') - write(1,10) a, nVerticesTotal - write(1,10) b - write(1,*) - 10 format(a70,i10) - - a = trim('object "edge list" class array type int rank 0 items') - b = trim('ascii data file vor.edge.data') - c = trim('attribute "ref" string "positions"') - write(1,10) a, nVerticesTotal - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "loops list" class array type int rank 0 items') - b = trim('ascii data file vor.loop.data') - c = trim('attribute "ref" string "edges"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "face list" class array type int rank 0 items') - b = trim('ascii data file vor.face.data') - c = trim('attribute "ref" string "loops"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object 0 class array type float rank 0 items') - b = trim('data file vor.area.data') - c = trim('attribute "dep" string "faces"') - write(1,10) a, nCells - write(1,10) b - write(1,10) c - write(1,*) - - a = trim('object "area" class field') - b = trim('component "positions" "positions list"') - c = trim('component "edges" "edge list"') - d = trim('component "loops" "loops list"') - e = 
trim('component "faces" "face list"') - f = trim('component "data" 0') - write(1,10) a - write(1,10) b - write(1,10) c - write(1,10) d - write(1,10) e - write(1,10) f - - close(1) - - open(unit=10,file='dx/vor.area.data',form='formatted',status='unknown') - open(unit=11,file='dx/vor.face.data',form='formatted',status='unknown') - open(unit=12,file='dx/vor.loop.data',form='formatted',status='unknown') - open(unit=13,file='dx/vor.edge.data',form='formatted',status='unknown') - open(unit=14,file='dx/vor.position.data',form='formatted',status='unknown') - - iLoop = 0 - iEdge = 0 - do i=1,nCells - write(10,20) areaCell(i) - write(11,21) i-1 - write(12,21) iLoop - iLoop = iLoop + nEdgesOnCell(i) - do j=1,nEdgesOnCell(i) - write(13,21) iEdge - iEdge = iEdge + 1 - k = verticesOnCell(j,i) - write(14,22) xVertex(k), yVertex(k), zVertex(k) - enddo - enddo - - 20 format(e20.10) - 21 format(i20) - 22 format(3e20.10) - - close(10) - close(11) - close(12) - close(13) - close(14) - - - end subroutine write_OpenDX - - -end module grid_meta diff --git a/grid_gen/global_scvt/src/module_grid_params.F b/grid_gen/global_scvt/src/module_grid_params.F deleted file mode 100644 index 58c2717dd..000000000 --- a/grid_gen/global_scvt/src/module_grid_params.F +++ /dev/null @@ -1,43 +0,0 @@ -module grid_params - - integer :: np - logical :: locs_as_xyz - logical :: l2_conv, inf_conv - integer :: n_scvt_iterations - real :: eps - real :: min_dx - - contains - - subroutine read_namelist() - - implicit none - - integer :: funit - real :: pi - - namelist /domains/ np, locs_as_xyz, n_scvt_iterations, eps, l2_conv, inf_conv, min_dx - - pi = 4.0*atan(1.0) - - funit = 21 - - np = 40962 - locs_as_xyz = .true. - n_scvt_iterations = 0 - eps = 0.0000000001 - l2_conv = .true. - inf_conv = .false. - min_dx = 120000.0 - - open(funit,file='namelist.input',status='old',form='formatted') - read(funit,domains) - close(funit) - - if(l2_conv) then - inf_conv = .false. - endif - - end subroutine read_namelist - -end module grid_params diff --git a/grid_gen/global_scvt/src/module_scvt.F b/grid_gen/global_scvt/src/module_scvt.F deleted file mode 100644 index e414d56eb..000000000 --- a/grid_gen/global_scvt/src/module_scvt.F +++ /dev/null @@ -1,276 +0,0 @@ -module scvt - - use data_types - use sphere_utilities - use voronoi_utils - use grid_constants - use grid_params - - - contains - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE SCVT_SOLVE - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
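read_namelist above fills the /domains/ namelist from a file named namelist.input, falling back to the defaults assigned in the code for any variable that is omitted. A minimal namelist.input that simply restates those defaults would look like:

    &domains
       np = 40962
       locs_as_xyz = .true.
       n_scvt_iterations = 0
       eps = 1.0e-10
       l2_conv = .true.
       inf_conv = .false.
       min_dx = 120000.0
    /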
- subroutine scvt_solve(n, lend, rlat, rlon, nvc, list, lptr, fn) - - implicit none - - integer, intent(in) :: n, nvc, fn - integer, dimension(n), intent(inout) :: lend - integer, dimension(nvc), intent(inout) :: list, lptr - real, dimension(n), intent(inout) :: rlat, rlon - - integer :: maxitr - - integer :: i, j, k, iter - integer :: ntmax, nrow, nptri - integer, allocatable, dimension(:) :: listc - real :: area, density, tot_mass - real :: x, y, z, new_ctr_x, new_ctr_y, new_ctr_z - real, allocatable, dimension(:) :: vclat, vclon - real, allocatable, dimension(:) :: rlat_2, rlon_2 - type (geo_point) :: p1, p2, p3, pc - type (geo_point) :: p_n1, p_n2 - type (geo_point), dimension(3,64) :: ptri - real :: avg_movement, maxmovement, movement - logical converged - - maxitr = n_scvt_iterations - - maxmovement = 100000 - - - ntmax = 6*n - nrow = 6 - nptri = 64 - allocate(listc(nvc)) - allocate(vclat(nvc)) - allocate(vclon(nvc)) - allocate(rlat_2(n)) - allocate(rlon_2(n)) - - iter = 1 - converged = .false. - - do while (iter <= maxitr .and. .not.converged) - - !write(0,*) 'scvt iteration ',iter - - ! - ! Compute Voronoi corners - ! - call compute_vc(rlat, rlon, n, nrow, ntmax, list, lptr, lend, listc, vclat, vclon, nvc) - - ! - ! Loop over vertices - ! Within the loop, p0 always refers to the current vertex being processed - ! -!$OMP PARALLEL DO PRIVATE(I, J, K, NEW_CTR_X, NEW_CTR_Y, NEW_CTR_Z, TOT_MASS, P1, P2, P3, PC, AREA, DENSITY, X, Y, Z, PTRI) SHARED(RLAT, RLON, RLAT_2, RLON_2, LPTR, LEND, LISTC, VCLAT, VCLON, NPTRI) - do i=1,n - - new_ctr_x = 0.0 - new_ctr_y = 0.0 - new_ctr_z = 0.0 - tot_mass = 0.0 - - ! - ! Compute center of mass of Voronoi cell - ! - p1%lat = rlat(i) - p1%lon = rlon(i) - - k = lend(i) - p2%lat = vclat(listc(k)) - p2%lon = vclon(listc(k)) - if (p1%lon - p2%lon > pii) p2%lon = p2%lon + 2.0*pii - if (p1%lon - p2%lon < -pii) p2%lon = p2%lon - 2.0*pii - k = lptr(lend(i)) - p3%lat = vclat(listc(k)) - p3%lon = vclon(listc(k)) - if (p1%lon - p3%lon > pii) p3%lon = p3%lon + 2.0*pii - if (p1%lon - p3%lon < -pii) p3%lon = p3%lon - 2.0*pii - - call divide_triangle(p1, p2, p3, nptri, ptri) - do j=1,nptri - area = triangle_area(ptri(1,j), ptri(2,j), ptri(3,j), 1.0) - call center_of_mass(ptri(1,j), ptri(2,j), ptri(3,j), pc) - if (p1%lon - pc%lon > pii) pc%lon = pc%lon + 2.0*pii - if (p1%lon - pc%lon < -pii) pc%lon = pc%lon - 2.0*pii - density = density_for_point(pc) - tot_mass = tot_mass + area * density - - call convert_lx(x, y, z, 1.0, pc) - new_ctr_x = new_ctr_x + x*area*density - new_ctr_y = new_ctr_y + y*area*density - new_ctr_z = new_ctr_z + z*area*density - end do - - do while (k /= lend(i)) - k = lptr(k) - p2 = p3 - p3%lat = vclat(listc(k)) - p3%lon = vclon(listc(k)) - if (p1%lon - p3%lon > pii) p3%lon = p3%lon + 2.0*pii - if (p1%lon - p3%lon < -pii) p3%lon = p3%lon - 2.0*pii - if (abs(p2%lat - p3%lat) < 0.00001 .and. 
abs(p2%lon - p3%lon) < 0.00001) cycle - - - call divide_triangle(p1, p2, p3, nptri, ptri) - do j=1,nptri - area = triangle_area(ptri(1,j), ptri(2,j), ptri(3,j), 1.0) - call center_of_mass(ptri(1,j), ptri(2,j), ptri(3,j), pc) - if (p1%lon - pc%lon > pii) pc%lon = pc%lon + 2.0*pii - if (p1%lon - pc%lon < -pii) pc%lon = pc%lon - 2.0*pii - density = density_for_point(pc) - tot_mass = tot_mass + area * density - - call convert_lx(x, y, z, 1.0, pc) - new_ctr_x = new_ctr_x + x*area*density - new_ctr_y = new_ctr_y + y*area*density - new_ctr_z = new_ctr_z + z*area*density - end do - end do - - new_ctr_x = new_ctr_x / tot_mass - new_ctr_y = new_ctr_y / tot_mass - new_ctr_z = new_ctr_z / tot_mass - call convert_xl(new_ctr_x, new_ctr_y, new_ctr_z, pc) - rlat_2(i) = pc%lat - rlon_2(i) = pc%lon - - - end do -!$OMP END PARALLEL DO - - !Compute movement - if(mod(iter,100).eq.0) then - maxmovement = 0.0 - avg_movement = 0.0 - do i = 1,n - - p_n1%lat = rlat(i) - p_n1%lon = rlon(i) - p_n2%lat = rlat_2(i) - p_n2%lon = rlon_2(i) - - call convert_lx(x,y,z,1.0,p_n1) - call convert_lx(new_ctr_x, new_ctr_y, new_ctr_z,1.0,p_n2) - - !x y z computation - movement = sqrt((x - new_ctr_x)**2 + (y - new_ctr_y)**2 + (z - new_ctr_z)**2) - - if(movement > maxmovement) maxmovement = movement - avg_movement = avg_movement + movement/n - - enddo - if(avg_movement.lt.eps.and.l2_conv) converged=.true. - if(avg_movement.lt.eps.and.inf_conv) converged=.true. - write(6,*) n, iter, maxmovement, avg_movement - endif - - rlat(:) = rlat_2(:) - rlon(:) = rlon_2(:) - iter = iter + 1 - - end do - - deallocate(listc) - deallocate(vclat) - deallocate(vclon) - deallocate(rlat_2) - deallocate(rlon_2) - - if (maxitr > 0) write(0,*) 'Finished SCVT solve' - - end subroutine scvt_solve - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE RANDOM_POINT - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine random_point(p) - - type (geo_point), intent(inout) :: p - real :: x, y, z, m - real :: pi - - pi = 4.0*atan(1.0) - - x = 0.0 - y = 0.0 - z = 0.0 - m = 2.0 - - do while (m > 1.0 .or. (x == 0.0 .and. y == 0.0 .and. z == 0.0)) - call random_number(x) - call random_number(y) - call random_number(z) - x = x * 2.0 - 1.0 - y = y * 2.0 - 1.0 - z = z * 2.0 - 1.0 - m = x**2 + y**2 + z**2 - end do - - m = 1.0 / sqrt(m) - x = x * m - y = y * m - z = z * m - - call convert_xl(x, y, z, p) - - end subroutine random_point - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! FUNCTION DENSITY_FOR_POINT - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - real function density_for_point(p) - - implicit none - - type (geo_point), intent(in) :: p - - character (len=256) :: fname - real :: rx, ry, rz, prx, pry, prz - type (geo_point) :: p_local - real :: hgt - real :: r, norm, t_cent - real :: r1 - real :: pi - real :: width, trans_center, min_val - - pi = 4.0*atan(1.0) - - !density_for_point = 1.0 + (1.19*cos(p%lat-3.141592654/4.0))**16.0 - - ! Uniform Density Function - density_for_point = 1.0 - - !Target Density Function based on hyperbolic tangent - ! p_local%lat = latitude (radians) center of high-resolution region - ! p_local%lon = longitude (radians) center of high-resolution region - ! width = width of transition zone - ! trans_center = width (radians) of high resolution zone - ! minval = minimum density value. to have grid spacing vary by a factor of 8 - ! set minval = (1.0 / 8.0)**4 - - ! 
p_local%lat = pii/4.0 - ! p_local%lon = 1.25*pii - ! call convert_lx(rx, ry, rz, 1.0, p) - ! call convert_lx(prx, pry, prz, 1.0, p_local) - ! r = acos(rx*prx + ry*pry + rz*prz) - - ! width = 0.15 - ! trans_center = pi/6.0 - ! min_val = (1.0/8.0)**4 - ! norm = 1.0/(1.0-min_val) - ! density_for_point = ((tanh((trans_center-r)*(1.0/width))+1.0)/2)/norm + min_val - - end function density_for_point - -end module scvt diff --git a/grid_gen/global_scvt/src/module_sphere_utilities.F b/grid_gen/global_scvt/src/module_sphere_utilities.F deleted file mode 100644 index f0026bec7..000000000 --- a/grid_gen/global_scvt/src/module_sphere_utilities.F +++ /dev/null @@ -1,959 +0,0 @@ -module sphere_utilities - - contains - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION TRIANGLE_AREA -! -! Given the (latitude, longitude) coordinates of the corners of a triangle, -! plus the radius of the sphere, compute the area of the triangle. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function triangle_area(p1, p2, p3, radius) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2, p3 - real, intent(in) :: radius - - real :: a, b, c, s, e, pii, tanqe - - pii = 2.*asin(1.0) - - a = sphere_distance(p1,p2,radius) - b = sphere_distance(p2,p3,radius) - c = sphere_distance(p3,p1,radius) - s = 0.5*(a+b+c) - - tanqe = sqrt(tan(0.5*s)*tan(0.5*(s-a))*tan(0.5*(s-b))*tan(0.5*(s-c))) - e = 4.*atan(tanqe) - triangle_area = radius*radius*e - -end function triangle_area - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION OBTUSE -! -! Given the (latitude, longitude) coordinates of the corners of a triangle, -! determine if the triangle is obtuse -! -! obtuse.ne.0 then the triangle is obtuse -! value of 1,2,3 means that angle associated with p1,p2,p3 is > 90 -! obtuse = 0 then the triangle is not obtuse -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -integer function obtuse(p1, p2, p3) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2, p3 - - real :: x1(3), x2(3), x3(3), dot, r(3), s(3), rmag, smag - - obtuse = 0 - - call convert_lx(x1(1), x1(2), x1(3), 1.0, p1) - call convert_lx(x2(1), x2(2), x2(3), 1.0, p2) - call convert_lx(x3(1), x3(2), x3(3), 1.0, p3) - - ! test angle formed by x3,x1,x2 - r(:) = x3(:) - x1(:) - s(:) = x2(:) - x1(:) - rmag = sqrt(r(1)**2+r(2)**2+r(3)**2) - smag = sqrt(s(1)**2+s(2)**2+s(3)**2) - r(:) = r(:) / rmag - s(:) = s(:) / smag - dot = r(1)*s(1) + r(2)*s(2) + r(3)*s(3) - if(dot.lt.0) obtuse = 1 - - ! test angle formed by x1,x2,x3 - r(:) = x1(:) - x2(:) - s(:) = x3(:) - x2(:) - rmag = sqrt(r(1)**2+r(2)**2+r(3)**2) - smag = sqrt(s(1)**2+s(2)**2+s(3)**2) - r(:) = r(:) / rmag - s(:) = s(:) / smag - dot = r(1)*s(1) + r(2)*s(2) + r(3)*s(3) - if(dot.lt.0) obtuse = 2 - - ! test angle formed by x2,x3,x1 - r(:) = x2(:) - x3(:) - s(:) = x1(:) - x3(:) - rmag = sqrt(r(1)**2+r(2)**2+r(3)**2) - smag = sqrt(s(1)**2+s(2)**2+s(3)**2) - r(:) = r(:) / rmag - s(:) = s(:) / smag - dot = r(1)*s(1) + r(2)*s(2) + r(3)*s(3) - if(dot.lt.0) obtuse = 3 - -end function obtuse - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION SPHERE_DISTANCE -! -! Given two (latitude, longitude) coordinates on the surface of a sphere, -! plus the radius of the sphere, compute the great circle distance between -! the points. 
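The triangle_area function above implements L'Huilier's theorem: with side arc lengths a, b, c and semi-perimeter s, the spherical excess E satisfies tan(E/4) = sqrt(tan(s/2) tan((s-a)/2) tan((s-b)/2) tan((s-c)/2)), and the area is R^2 E. A small standalone Python version of the same formula, with points given as (lat, lon) in radians and side lengths from the haversine form used throughout this module:

```python
import math

def arc_length(p1, p2):
    """Great-circle angle between two (lat, lon) points, in radians."""
    lat1, lon1 = p1
    lat2, lon2 = p2
    h = math.sin(0.5 * (lat2 - lat1)) ** 2 + \
        math.cos(lat1) * math.cos(lat2) * math.sin(0.5 * (lon2 - lon1)) ** 2
    return 2.0 * math.asin(math.sqrt(h))

def spherical_triangle_area(p1, p2, p3, radius=1.0):
    """Area via L'Huilier's theorem (spherical excess)."""
    a = arc_length(p1, p2)
    b = arc_length(p2, p3)
    c = arc_length(p3, p1)
    s = 0.5 * (a + b + c)
    tan_qe = math.sqrt(math.tan(0.5 * s) * math.tan(0.5 * (s - a)) *
                       math.tan(0.5 * (s - b)) * math.tan(0.5 * (s - c)))
    excess = 4.0 * math.atan(tan_qe)
    return radius * radius * excess
```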
-!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function sphere_distance(p1, p2, radius) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2 - real, intent(in) :: radius - - real :: arg1 - - arg1 = sqrt( sin(0.5*(p2%lat-p1%lat))**2 + & - cos(p1%lat)*cos(p2%lat)*sin(0.5*(p2%lon-p1%lon))**2 ) - sphere_distance = 2.*radius*asin(arg1) - -end function sphere_distance - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION PLANE_DISTANCE -! -! Given two (latitude, longitude) coordinates on the surface of a sphere, -! plus the radius of the sphere, compute the secant distance between -! the points. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function plane_distance(p1, p2, radius) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2 - real, intent(in) :: radius - - real :: x1, x2, y1, y2, z1, z2 - - z1 = sin(p1%lat) - z2 = sin(p2%lat) - x1 = cos(p1%lon)*cos(p1%lat) - x2 = cos(p2%lon)*cos(p2%lat) - y1 = sin(p1%lon)*cos(p1%lat) - y2 = sin(p2%lon)*cos(p2%lat) - - plane_distance = radius*sqrt((z1-z2)**2+(x1-x2)**2+(y1-y2)**2) - -end function plane_distance - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION ARC_ANGLE -! -! Given two (latitude, longitude) coordinates on the surface of a sphere, -! compute the angle between the points as measured from the origin of the -! sphere. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function arc_angle(p1, p2) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2 - - real :: arg1 - - arg1 = sqrt( sin(0.5*(p2%lat-p1%lat))**2 + & - cos(p1%lat)*cos(p2%lat)*sin(0.5*(p2%lon-p1%lon))**2 ) - arc_angle = 2.*asin(arg1) - -end function arc_angle - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION GREAT_CIRCLE_POINTS -! -! Return n points equally spaced along the great circle arc between (lat1,lon1) -! and (lat2,lon2). These points include the end points of the arc. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine great_circle_points(p1, p2, pl, n) - - use data_types - - implicit none - - integer, intent(in) :: n - type (geo_point), intent(in) :: p1, p2 - type (geo_point), dimension(n), intent(inout) :: pl - - real :: x1, x2, y1, y2, z1, z2 - real :: dx, dl - real :: x, y, z - integer :: i - real :: dtheta, dinc, dt - - real :: pii, rtod - - pii = 2.*asin(1.0) - rtod = 180./pii - -! write(6,*) ' in gcp ',rtod*lat1,rtod*lon1,rtod*lat2,rtod*lon2 - - if (n < 2) then - write(6,*) ' n less than 2 in great_circle_points ' - stop - end if - - if (n == 2) then - pl(1) = p1 - pl(2) = p2 - end if - - dtheta = arc_angle(p1, p2) - dinc = dtheta/float(n-1) - - call convert_lx(x1,y1,z1,1.,p1) - call convert_lx(x2,y2,z2,1.,p2) - -! set the end points - - pl(1) = p1 - pl(n) = p2 - -! write(6,*) ' x1,y1,z1 ',x1,y1,z1 -! write(6,*) ' x2,y2,z2 ',x2,y2,z2 - -! compute the interior points. see notes for derivation - - do i=2,n-1 - dt = float(i-1)*dinc - - if (dt <= 0.5*dtheta) then - dx = 1.-tan(0.5*dtheta-dt)/tan(0.5*dtheta) -! write(6,*) ' case 1 ',dx - x = x1+0.5*dx*(x2-x1) - y = y1+0.5*dx*(y2-y1) - z = z1+0.5*dx*(z2-z1) - else - dt = dtheta-dt - dx = 1.-tan(0.5*dtheta-dt)/tan(0.5*dtheta) -! write(6,*) ' case 2 ',dx - x = x2+0.5*dx*(x1-x2) - y = y2+0.5*dx*(y1-y2) - z = z2+0.5*dx*(z1-z2) - end if - -! 
write(6,*) ' x,y,z ',x,y,z - - call convert_xl(x,y,z,pl(i)) - enddo - -end subroutine great_circle_points - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE DIVIDE_TRIANGLE -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -!subroutine divide_triangle( p1, p2, p3, pnew) -! -! use data_types -! -! implicit none -! -! type (geo_point), intent(in) :: p1, p2, p3 -! type (geo_point), dimension(6), intent(inout) :: pnew -! -! real :: t_area, area_total, radius -! type (geo_point), dimension(3) :: pts -! type (geo_point) :: c -! -! radius = 1. -! pnew(1) = p1 -! pnew(4) = p2 -! pnew(6) = p3 -! -! call great_circle_points(p1,p2,pts,3) -! pnew(2) = pts(2) -! -! call great_circle_points(p1,p3,pts,3) -! pnew(3) = pts(2) -! -! call great_circle_points(p2,p3,pts,3) -! pnew(5) = pts(2) -! -! -! write(6,*) ' ' -! write(6,*) ' original triangle ' -! write(6,*) p1%lat, p1%lon -! write(6,*) p2%lat, p2%lon -! write(6,*) p3%lat, p3%lon -! -! t_area = triangle_area(p1,p2,p3,radius) -! write(6,*) ' area ',t_area -! call compute_voronoi_corner(p1,p2,p3,c) -! write(6,*) ' voronoi corner ',c%lat,c%lon -! -! area_total = 0. -! -! write(6,*) ' ' -! write(6,*) ' new triangles ' -! -! write(6,*) ' triangle 1 ' -! write(6,*) pnew(1)%lat,pnew(1)%lon -! write(6,*) pnew(1)%lat,pnew(2)%lon -! write(6,*) pnew(1)%lat,pnew(3)%lon -! t_area = triangle_area( pnew(1),pnew(2),pnew(3),radius) -! area_total = area_total + t_area -! write(6,*) ' area ',t_area -! -! write(6,*) ' triangle 2 ' -! write(6,*) pnew(2)%lat,pnew(2)%lon -! write(6,*) pnew(4)%lat,pnew(4)%lon -! write(6,*) pnew(5)%lat,pnew(5)%lon -! t_area = triangle_area( pnew(2),pnew(4),pnew(5),radius) -! area_total = area_total + t_area -! write(6,*) ' area ',t_area -! -! write(6,*) ' triangle 3 ' -! write(6,*) pnew(2)%lat,pnew(2)%lon -! write(6,*) pnew(5)%lat,pnew(5)%lon -! write(6,*) pnew(3)%lat,pnew(3)%lon -! t_area = triangle_area( pnew(2),pnew(5),pnew(3),radius) -! area_total = area_total + t_area -! write(6,*) ' area ',t_area -! -! write(6,*) ' triangle 4 ' -! write(6,*) pnew(3)%lat,pnew(3)%lon -! write(6,*) pnew(5)%lat,pnew(5)%lon -! write(6,*) pnew(6)%lat,pnew(6)%lon -! t_area = triangle_area( pnew(3),pnew(5),pnew(6),radius) -! area_total = area_total + t_area -! write(6,*) ' area ',t_area -! write(6,*) ' total area is ',area_total -! -!end subroutine divide_triangle - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE COMPUTE_VORONOI_CORNER -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -!subroutine compute_voronoi_corner( p0, p1, p2, vc ) -! -! use data_types -! -! implicit none -! -! type (geo_point), intent(in) :: p0, p1, p2 -! type (geo_point), intent(out) :: vc -! -! real :: x0, y0, z0, x1, y1, z1, x2, y2, z2, xc, yc, zc, cabs -! real :: a1, a2, a3, b1, b2, b3 -! real :: dot0 -! -! z0 = sin(p0%lat) -! z1 = sin(p1%lat) -! z2 = sin(p2%lat) -! -! x0 = cos(p0%lon)*cos(p0%lat) -! x1 = cos(p1%lon)*cos(p1%lat) -! x2 = cos(p2%lon)*cos(p2%lat) -! -! y0 = sin(p0%lon)*cos(p0%lat) -! y1 = sin(p1%lon)*cos(p1%lat) -! y2 = sin(p2%lon)*cos(p2%lat) -! -! a1 = x2-x0 -! a2 = y2-y0 -! a3 = z2-z0 -! -! b1 = x1-x0 -! b2 = y1-y0 -! b3 = z1-z0 -! -! -! xc = a2*b3-a3*b2 -! yc = a3*b1-a1*b3 -! zc = a1*b2-a2*b1 -! cabs = sqrt(xc*xc+yc*yc+zc*zc) -! -!! write(6,*) ' cabs = ',cabs -!! write(6,*) ' xc, yc, zc = ',xc,yc,zc -!! write(6,*) ' x0, y0, z0 = ',x0,y0,z0 -!! write(6,*) ' x1, y1, z1 = ',x1,y1,z1 -!! 
write(6,*) ' x2, y2, z2 = ',x2,y2,z2 -! dot0 = x0*xc+y0*yc+z0*zc -!! write(6,*) ' dot is ',dot0 -! -! if( dot0 < 0.) then ! flip p1 with p2 -! -! z2 = sin(p1%lat) -! z1 = sin(p2%lat) -! -! x2 = cos(p1%lon)*cos(p1%lat) -! x1 = cos(p2%lon)*cos(p2%lat) -! -! y2 = sin(p1%lon)*cos(p1%lat) -! y1 = sin(p2%lon)*cos(p2%lat) -! -! a1 = x2-x0 -! a2 = y2-y0 -! a3 = z2-z0 -! -! b1 = x1-x0 -! b2 = y1-y0 -! b3 = z1-z0 -! -! -! xc = a2*b3-a3*b2 -! yc = a3*b1-a1*b3 -! zc = a1*b2-a2*b1 -! cabs = sqrt(xc*xc+yc*yc+zc*zc) -! -!! write(6,*) ' flipping ' -!! write(6,*) ' cabs = ',cabs -!! write(6,*) ' xc, yc, zc = ',xc,yc,zc -! dot0 = x0*xc+y0*yc+z0*zc -!! write(6,*) ' dot is ',dot0 -! -! end if -! -! -! xc = xc/cabs -! yc = yc/cabs -! zc = zc/cabs -! -! call convert_xl(xc,yc,zc,vc) -! -!end subroutine compute_voronoi_corner - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CONVERT_LX -! -! Convert (lat,lon) to an (x, y, z) location on a sphere with specified radius. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine convert_lx(x, y, z, radius, latlon) - - use data_types - - implicit none - - real, intent(in) :: radius - type (geo_point), intent(in) :: latlon - real, intent(out) :: x, y, z - - z = radius * sin(latlon%lat) - x = radius * cos(latlon%lon) * cos(latlon%lat) - y = radius * sin(latlon%lon) * cos(latlon%lat) - -end subroutine convert_lx - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CONVERT_XL -! -! Convert (x, y, z) to a (lat, lon) location on a sphere with -! radius sqrt(x^2 + y^2 + z^2). -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine convert_xl(x, y, z, latlon) - - use data_types - - implicit none - - real, intent(in) :: x, y, z - type (geo_point), intent(out) :: latlon - - real :: dl, clat, pii, rtod - real :: eps - parameter (eps=1.e-10) - - pii = 2.*asin(1.0) - rtod=180./pii - dl = sqrt(x*x + y*y + z*z) - - latlon%lat = asin(z/dl) - -! check for being close to either pole - - if (abs(x) > eps) then - - if (abs(y) > eps) then - - latlon%lon = atan(abs(y/x)) - - if ((x <= 0.) .and. (y >= 0.)) then - latlon%lon = pii-latlon%lon - else if ((x <= 0.) .and. (y < 0.)) then - latlon%lon = latlon%lon+pii - else if ((x >= 0.) .and. (y <= 0.)) then - latlon%lon = 2*pii-latlon%lon - end if - - else ! we're either on longitude 0 or 180 - - if (x > 0) then - latlon%lon = 0. - else - latlon%lon = pii - end if - - end if - - else if (abs(y) > eps) then - - if (y > 0) then - latlon%lon = pii/2. - else - latlon%lon = 3.*pii/2. - end if - - else ! we are at a pole - - latlon%lon = 0. - - end if - -end subroutine convert_xl - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE GC_INTERSECT -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
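The convert_lx/convert_xl pair above maps between geographic (lat, lon) and Cartesian (x, y, z) coordinates on the sphere. The deleted Fortran resolves the longitude quadrant and the poles by hand; the same mapping can be sketched more compactly in Python with atan2, wrapping longitude into [0, 2*pi) to match the convention used here:

```python
import math

def latlon_to_xyz(lat, lon, radius=1.0):
    """(lat, lon) in radians -> Cartesian point on a sphere of given radius."""
    x = radius * math.cos(lat) * math.cos(lon)
    y = radius * math.cos(lat) * math.sin(lon)
    z = radius * math.sin(lat)
    return x, y, z

def xyz_to_latlon(x, y, z):
    """Cartesian point -> (lat, lon) in radians, lon wrapped into [0, 2*pi)."""
    r = math.sqrt(x * x + y * y + z * z)
    lat = math.asin(z / r)
    lon = math.atan2(y, x) % (2.0 * math.pi)
    return lat, lon
```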
-subroutine gc_intersect(p0, p1, p2, p3, pc) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p0, p1, p2, p3 - type (geo_point), intent(out) :: pc - - real :: x0, y0, z0, x1, y1, z1, x2, y2, z2, x3, y3, z3 - real :: n1, n2, n3, m1, m2, m3 - real :: xc, yc, zc, dot - real, parameter :: radius=1.0 - - call convert_lx(x0,y0,z0,radius,p0) - call convert_lx(x1,y1,z1,radius,p1) - call convert_lx(x2,y2,z2,radius,p2) - call convert_lx(x3,y3,z3,radius,p3) - - n1 = (y0 * z1 - y1 * z0) - n2 = -(x0 * z1 - x1 * z0) - n3 = (x0 * y1 - x1 * y0) - - m1 = (y2 * z3 - y3 * z2) - m2 = -(x2 * z3 - x3 * z2) - m3 = (x2 * y3 - x3 * y2) - - xc = (n2 * m3 - n3 * m2) - yc = -(n1 * m3 - n3 * m1) - zc = (n1 * m2 - n2 * m1) - - dot = x0*xc + y0*yc + z0*zc - - if (dot < 0.0) then - xc = -xc - yc = -yc - zc = -zc - end if - - call convert_xl(xc,yc,zc,pc) - -end subroutine gc_intersect - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION POS_ANG -! -! Normalize an angle, given in radians, to lie in the interval [0,2*PI]. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function pos_ang(angle) - - implicit none - - real, intent(in) :: angle - - real :: pii - - pii = 2.*asin(1.0) - pos_ang = angle - - if(angle > 2.*pii) then - pos_ang = angle - 2.*pii - else if(angle < 0.) then - pos_ang = angle + 2.*pii - end if - -end function pos_ang - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION MERIDIAN_ANGLE -! -! Find the angle between the meridian that intersects point (lat1,lon1) -! and the great circle passing through points (lat1,lon1) (lat2,lon2). -! (lat1,lon1) is the vertex of the angle. -! -! Convention: zero points north, 90 points west, -90 point east, -! points south 180, -180 -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function meridian_angle(p1, p2) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2 - -type (geo_point) :: np - - real :: pii, da, db, dc - type (geo_point) :: p3 - real :: cosa - real :: eps - parameter (eps = 1.e-04) -real :: ax, ay, az -real :: bx, by, bz -real :: cx, cy, cz - -np = p1 -np%lat = np%lat + 0.05 - -call convert_lx(ax, ay, az, 1.0, p1) -call convert_lx(bx, by, bz, 1.0, np) -call convert_lx(cx, cy, cz, 1.0, p2) - -meridian_angle = plane_angle(ax, ay, az, bx, by, bz, cx, cy, cz, ax, ay, az) -return - - if (p1%lon == p2%lon) then - - meridian_angle = 0.0 - - else - - pii = 2.*asin(1.0) - dc = arc_angle(p1,p2) - - p3%lon = p1%lon - if (p1%lat + dc <= pii/2.0) then - p3%lat = p1%lat+dc - else - p3%lat = p1%lat-dc - end if - db = arc_angle(p1,p3) - da = arc_angle(p2,p3) - -! see spherical trig section on online wolfram pages - eq(11) -> - - cosa = max(-1.,min(1.,(cos(da)-cos(db)*cos(dc))/(sin(db)*sin(dc)))) - meridian_angle = acos(cosa) - - - if (((p2%lon > p1%lon) .and. (p2%lon - p1%lon <= pii)) .or. & - ((p2%lon < p1%lon) .and. (p1%lon - p2%lon >= pii))) then - meridian_angle = -abs(meridian_angle) - else - meridian_angle = abs(meridian_angle) - end if - - end if - -end function meridian_angle - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE CENTER_OF_MASS -! -! Find centriod of the triangle whose corners are at (lat1,lon1), (lat2,lon2), -! and (lat3,lon3). -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
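The gc_intersect routine above finds where two great circles cross by intersecting their planes: each great circle lies in a plane through the origin whose normal is the cross product of its two defining points, and the line common to the two planes pierces the sphere at the two candidate intersections, with the one on p0's side selected. A condensed NumPy sketch of the same construction, taking and returning unit 3-vectors:

```python
import numpy as np

def gc_intersect(p0, p1, p2, p3):
    """Intersection of the great circles through (p0, p1) and (p2, p3),
    choosing the solution in the hemisphere of p0."""
    n = np.cross(p0, p1)      # normal of the first great-circle plane
    m = np.cross(p2, p3)      # normal of the second
    c = np.cross(n, m)        # direction of the planes' line of intersection
    if np.dot(c, p0) < 0.0:
        c = -c                # pick the intersection on p0's side
    return c / np.linalg.norm(c)
```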
-subroutine center_of_mass(p1, p2, p3, pc) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2, p3 - type (geo_point), intent(out) :: pc - - real :: x1, x2, x3, xc - real :: y1, y2, y3, yc - real :: z1, z2, z3, zc - - call convert_lx(x1,y1,z1,1.,p1) - call convert_lx(x2,y2,z2,1.,p2) - call convert_lx(x3,y3,z3,1.,p3) - - xc = (x1+x2+x3)/3. - yc = (y1+y2+y3)/3. - zc = (z1+z2+z3)/3. - - call convert_xl(xc,yc,zc,pc) - -end subroutine center_of_mass - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE DIVIDE_TRIANGLE -! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine divide_triangle(p1, p2, p3, n, p) - - use data_types - - implicit none - - type (geo_point), intent(in) :: p1, p2, p3 - integer, intent(in) :: n - type (geo_point), dimension(3,n), intent(out) :: p - - integer :: i, j, k - integer :: glevel ! Level of decomposition - type (geo_point), allocatable, dimension(:) :: p1p2, p1p3 - type (geo_point), allocatable, dimension(:,:) :: line - - glevel = nint(log(real(n)) / log(4.0)) ! Each subdivision gives four times the number of - ! triangles, so log4(n) gives the level decomposition - - glevel = (2 ** glevel) + 1 - allocate(line(glevel, glevel)) - allocate(p1p2(glevel)) - allocate(p1p3(glevel)) - - call great_circle_points(p1, p2, p1p2, glevel) - call great_circle_points(p1, p3, p1p3, glevel) - - line(1,1) = p1 - line(1,2) = p1p2(2) - line(2,2) = p1p3(2) - - do i = 3,glevel - call great_circle_points(p1p2(i), p1p3(i), line(:,i), i) -!do j=1,i -!write(0,*) j,i,' P ',line(j,i)%lat*180./3.14159, line(j,i)%lon*180./3.14159 -!end do - end do - - k = 1 - do i = 1,glevel-1 - do j = 1,i - p(1,k) = line(j,i) - p(2,k) = line(j,i+1) - p(3,k) = line(j+1,i+1) -!write(0,*) j,i, ' - ',p(1,k)%lat*180./3.14159,p(1,k)%lon*180./3.14159 -!write(0,*) j,i+1, ' - ',p(2,k)%lat*180./3.14159,p(2,k)%lon*180./3.14159 -!write(0,*) j+1,i+1, ' - ',p(3,k)%lat*180./3.14159,p(3,k)%lon*180./3.14159 - k = k + 1 - end do - end do - -!write(0,*) '-----------' - do i = glevel,3,-1 - do j = 2,i-1 - p(1,k) = line(j,i) - p(2,k) = line(j,i-1) - p(3,k) = line(j-1,i-1) -!write(0,*) j,i, ' - ',p(1,k)%lat*180./3.14159,p(1,k)%lon*180./3.14159 -!write(0,*) j,i-1, ' - ',p(2,k)%lat*180./3.14159,p(2,k)%lon*180./3.14159 -!write(0,*) j-1,i-1, ' - ',p(3,k)%lat*180./3.14159,p(3,k)%lon*180./3.14159 - k = k + 1 - end do - end do - - deallocate(line) - deallocate(p1p2) - deallocate(p1p3) - -end subroutine divide_triangle - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE POINT_TO_PLANE -! -! Find projection (xp, yp, zp) of a point (Qx,Qy,Qz) onto the plane defined by -! the equation ax+by+cz+d=0 -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine point_to_plane(a, b, c, d, Qx, Qy, Qz, xp, yp, zp) - - implicit none - - real, intent(in) :: a, b, c, d ! The coefficients in the equation of the plane - real, intent(in) :: Qx, Qy, Qz ! The coordinates of the point Q to be projected to the plane - real, intent(out) :: xp, yp, zp ! The coordinates of the point projected in the plane - - real :: Px, Py, Pz ! A point P in the plane ax + by + cz + d = 0 - real :: PQx, PQy, PQz ! Components of the vector from P to Q - real :: PQn ! The dot product of PQ and the vector normal to the plane - real :: m2 ! 
The magnitude and squared magnitude of the vector n normal to the plane - - m2 = (a**2.0 + b**2.0 + c**2.0) - - Px = -d*a/m2 - Py = -d*b/m2 - Pz = -d*c/m2 - - PQx = Qx - Px - PQy = Qy - Py - PQz = Qz - Pz - - PQn = PQx * a + PQy * b + PQz * c - - ! . Q - ! n ^ / - ! | / - ! |/ - ! ----------.------------------- - ! P - - xp = Qx - PQn * a / m2 - yp = Qy - PQn * b / m2 - zp = Qz - PQn * c / m2 - -end subroutine point_to_plane - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE POINT_TO_SPHERE -! -! Find projection (xp, yp, zp) of a point (Qx,Qy,Qz) in the plane defined by -! the equation ax+by+cz+d=0 onto the surface of the sphere with radius r -! centered at the origin. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine point_to_sphere(a, b, c, d, r, Qx, Qy, Qz, xp, yp, zp) - - implicit none - - real, intent(in) :: a, b, c, d ! The coefficients in the equation of the plane - real, intent(in) :: r ! The radius of the sphere - real, intent(in) :: Qx, Qy, Qz ! The coordinates of the point Q to be projected to the sphere - real, intent(out) :: xp, yp, zp ! The coordinates of the point projected to the sphere - - real :: aa, bb, cc ! Coefficients of quadratic equation - real :: disc, t1, t2 - - ! Solve for the interesection of the line (Qx - at, Qy - bt, Qz - ct) and the - ! sphere x^2 + y^2 + z^2 - r^2 = 0 - aa = a**2.0 + b**2.0 + c**2.0 - bb = -2.0*(Qx*a + Qy*b + Qz*c) - cc = Qx**2.0 + Qy**2.0 + Qz**2.0 - r**2.0 - - disc = bb**2.0 - 4.0*aa*cc - - if (disc < 0.0) then ! Point has no projection on the surface of the sphere - xp = 0.0 - yp = 0.0 - zp = 0.0 - else if (disc == 0.0) then ! Point has exactly one projection (line through point and - t1 = -bb / (2.0*aa) - xp = Qx - a*t1 ! and normal to plane is tangent to sphere - yp = Qy - b*t1 - zp = Qz - c*t1 - else ! Point has two projections; choose the one that is closest - t1 = (-bb + sqrt(disc)) / (2.0*aa) - t2 = (-bb - sqrt(disc)) / (2.0*aa) - if (abs(t1) <= abs(t2)) then - xp = Qx - a*t1 - yp = Qy - b*t1 - zp = Qz - c*t1 - else - xp = Qx - a*t2 - yp = Qy - b*t2 - zp = Qz - c*t2 - end if - end if - -end subroutine point_to_sphere - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! SUBROUTINE ROTATE_ABOUT_VECTOR -! -! Rotates the point (x,y,z) through an angle theta about the vector -! originating at (a, b, c) and having direction (u, v, w). -! -! Reference: http://inside.mines.edu/~gmurray/ArbitraryAxisRotation/ArbitraryAxisRotation.html -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -subroutine rotate_about_vector(x, y, z, theta, a, b, c, u, v, w, xp, yp, zp) - - implicit none - - real, intent(in) :: x, y, z, theta, a, b, c, u, v, w - real, intent(out) :: xp, yp, zp - - real :: vw2, uw2, uv2 - real :: m - - vw2 = v**2.0 + w**2.0 - uw2 = u**2.0 + w**2.0 - uv2 = u**2.0 + v**2.0 - m = sqrt(u**2.0 + v**2.0 + w**2.0) - - xp = (a*vw2 + u*(-b*v-c*w+u*x+v*y+w*z) + ((x-a)*vw2+u*(b*v+c*w-v*y-w*z))*cos(theta) + m*(-c*v+b*w-w*y+v*z)*sin(theta))/m**2.0 - yp = (b*uw2 + v*(-a*u-c*w+u*x+v*y+w*z) + ((y-b)*uw2+v*(a*u+c*w-u*x-w*z))*cos(theta) + m*( c*u-a*w+w*x-u*z)*sin(theta))/m**2.0 - zp = (c*uv2 + w*(-a*u-b*v+u*x+v*y+w*z) + ((z-c)*uv2+w*(a*u+b*v-u*x-v*y))*cos(theta) + m*(-b*u+a*v-v*x+u*y)*sin(theta))/m**2.0 - -end subroutine rotate_about_vector - - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! FUNCTION PLANE_ANGLE -! -! 
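The point_to_plane routine above projects a point Q onto the plane a*x + b*y + c*z + d = 0 by removing the component of Q's offset from the plane along the plane's normal n = (a, b, c); equivalently, P = Q - ((n.Q + d) / |n|^2) * n. A minimal Python rendering of that formula:

```python
def point_to_plane(a, b, c, d, q):
    """Orthogonal projection of point q = (x, y, z) onto a*x + b*y + c*z + d = 0."""
    qx, qy, qz = q
    m2 = a * a + b * b + c * c          # squared magnitude of the plane normal
    pqn = a * qx + b * qy + c * qz + d  # n.Q + d: signed offset times |n|
    return (qx - pqn * a / m2,
            qy - pqn * b / m2,
            qz - pqn * c / m2)
```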
Computes the angle between vectors AB and AC, given points A, B, and C, and -! a vector (u,v,w) normal to the plane. -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -real function plane_angle(ax, ay, az, bx, by, bz, cx, cy, cz, u, v, w) - - implicit none - - real, intent(in) :: ax, ay, az, bx, by, bz, cx, cy, cz, u, v, w - - real :: ABx, ABy, ABz ! The components of the vector AB - real :: mAB ! The magnitude of AB - real :: ACx, ACy, ACz ! The components of the vector AC - real :: mAC ! The magnitude of AC - - real :: Dx ! The i-components of the cross product AB x AC - real :: Dy ! The j-components of the cross product AB x AC - real :: Dz ! The k-components of the cross product AB x AC - - real :: cos_angle - - ABx = bx - ax - ABy = by - ay - ABz = bz - az - mAB = sqrt(ABx**2.0 + ABy**2.0 + ABz**2.0) - - ACx = cx - ax - ACy = cy - ay - ACz = cz - az - mAC = sqrt(ACx**2.0 + ACy**2.0 + ACz**2.0) - - - Dx = (ABy * ACz) - (ABz * ACy) - Dy = -((ABx * ACz) - (ABz * ACx)) - Dz = (ABx * ACy) - (ABy * ACx) - - cos_angle = (ABx*ACx + ABy*ACy + ABz*ACz) / (mAB * mAC) - - if (cos_angle < -1.0) then - cos_angle = -1.0 - else if (cos_angle > 1.0) then - cos_angle = 1.0 - end if - - if ((Dx*u + Dy*v + Dz*w) >= 0.0) then - plane_angle = acos(cos_angle) - else - plane_angle = -acos(cos_angle) - end if - -end function plane_angle - -end module sphere_utilities diff --git a/grid_gen/global_scvt/src/module_voronoi_utils.F b/grid_gen/global_scvt/src/module_voronoi_utils.F deleted file mode 100644 index 3079cd320..000000000 --- a/grid_gen/global_scvt/src/module_voronoi_utils.F +++ /dev/null @@ -1,113 +0,0 @@ -module voronoi_utils - - use grid_constants - use stripack - - contains - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_DT - ! - ! Compute the Delaunay triangulation of a set of lat/lon locations. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine compute_dt(rlat, rlon, n, ltri, nrow, ntmx, nt) - - implicit none - - integer, intent(in) :: n, nrow, ntmx - integer, intent(inout) :: nt - integer, dimension(nrow, ntmx), intent(in) :: ltri - real, dimension(n), intent(in) :: rlat, rlon - - integer :: ierr, lnew, nscr - integer, dimension(n) :: near, next - integer, dimension(n) :: lend - integer, dimension(6*n+12) :: list, lptr - real, dimension(n) :: x, y, z, dist - - nscr = 6*n+12 - - call trans(n, rlat, rlon, x, y, z) - - write(0,*) 'started TRMESH' - call trmesh(n, x, y, z, list, lptr, lend, lnew, near, next, dist, ierr) - if (ierr /= 0) then - write(0,*) 'Error: TRMESH returned error code ',ierr - end if - write(0,*) 'finished TRMESH' - - write(0,*) 'started TRLIST' - call trlist(n, list, lptr, lend, nrow, nt, ltri, ierr) - if (ierr /= 0) then - write(0,*) 'Error: TRLIST returned error code ',ierr - end if - write(0,*) 'finished TRLIST' - - end subroutine compute_dt - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE COMPUTE_VC - ! - ! Compute the Voronoi corners of a set of lat/lon locations. - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
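The compute_dt routine above obtains the spherical Delaunay triangulation from STRIPACK's TRMESH/TRLIST. Outside of Fortran, an equivalent triangulation can be recovered from the convex hull of the unit position vectors, since for points lying on a sphere the hull facets are exactly the Delaunay triangles. A short sketch, assuming SciPy is available (this is an alternative tool, not the deleted STRIPACK path):

```python
import numpy as np
from scipy.spatial import ConvexHull

def spherical_delaunay(lat, lon):
    """Delaunay triangles of points on the unit sphere via the convex hull.
    Returns an (ntriangles, 3) array of indices into the input points."""
    x = np.cos(lat) * np.cos(lon)
    y = np.cos(lat) * np.sin(lon)
    z = np.sin(lat)
    hull = ConvexHull(np.column_stack([x, y, z]))
    return hull.simplices
```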
- subroutine compute_vc(rlat, rlon, n, nrow, ntmx, list, lptr, lend, listc, vclat, vclon, nvc) - - implicit none - - integer, intent(in) :: n, nrow, ntmx, nvc - integer, dimension(nvc), intent(inout) :: list, lptr, listc - real, dimension(nvc), intent(inout) :: vclat, vclon - integer, dimension(n), intent(inout) :: lend - real, dimension(n), intent(in) :: rlat, rlon - - integer :: ierr, lnew, nb - integer, dimension(n) :: near, next - integer, dimension(nrow, ntmx) :: ltri - real, dimension(n) :: x, y, z, dist - real, dimension(nvc) :: xc, yc, zc, rc - - if (nvc < 6*n-12) then - write(0,*) 'Error: Argument nvc to COMPUTE_VC must be at least 6*n+12' - return - end if - - call trans(n, rlat, rlon, x, y, z) - - call trmesh(n, x, y, z, list, lptr, lend, lnew, near, next, dist, ierr) - if (ierr /= 0) then - write(0,*) 'Error: TRMESH returned error code ',ierr - end if - - call crlist(n, ntmx, x, y, z, list, lend, lptr, lnew, ltri, listc, nb, xc, yc, zc, rc, ierr) - if (ierr /= 0) then - write(0,*) 'Error: CRLIST returned error code ',ierr - end if - - call trans_inv(xc, yc, zc, vclat, vclon, nvc) - - end subroutine compute_vc - - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! SUBROUTINE TRANS_INV - ! - !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - subroutine trans_inv(x, y, z, lat, lon, n) - - implicit none - - integer, intent(in) :: n - real, dimension(n), intent(in) :: x, y, z - real, dimension(n), intent(out) :: lat, lon - - integer :: i - - do i=1,n - lat(i) = (pii/2.0 - acos(z(i))) - lon(i) = atan2(y(i),x(i)) - end do - - end subroutine trans_inv - -end module voronoi_utils diff --git a/grid_gen/global_scvt/src/module_write_netcdf.F b/grid_gen/global_scvt/src/module_write_netcdf.F deleted file mode 100644 index d386a4ad4..000000000 --- a/grid_gen/global_scvt/src/module_write_netcdf.F +++ /dev/null @@ -1,646 +0,0 @@ -module write_netcdf - - use grid_params - - integer :: wr_ncid - integer :: wrDimIDTime - integer :: wrDimIDnCells - integer :: wrDimIDnEdges - integer :: wrDimIDnVertices - integer :: wrDimIDmaxEdges - integer :: wrDimIDmaxEdges2 - integer :: wrDimIDTWO - integer :: wrDimIDvertexDegree - integer :: wrDimIDnVertLevels - integer :: wrDimIDnTracers - integer :: wrVarIDlatCell - integer :: wrVarIDlonCell - integer :: wrVarIDmeshDensity - integer :: wrVarIDxCell - integer :: wrVarIDyCell - integer :: wrVarIDzCell - integer :: wrVarIDindexToCellID - integer :: wrVarIDlatEdge - integer :: wrVarIDlonEdge - integer :: wrVarIDxEdge - integer :: wrVarIDyEdge - integer :: wrVarIDzEdge - integer :: wrVarIDindexToEdgeID - integer :: wrVarIDlatVertex - integer :: wrVarIDlonVertex - integer :: wrVarIDxVertex - integer :: wrVarIDyVertex - integer :: wrVarIDzVertex - integer :: wrVarIDindexToVertexID - integer :: wrVarIDcellsOnEdge - integer :: wrVarIDnEdgesOnCell - integer :: wrVarIDnEdgesOnEdge - integer :: wrVarIDedgesOnCell - integer :: wrVarIDedgesOnEdge - integer :: wrVarIDweightsOnEdge - integer :: wrVarIDdvEdge - integer :: wrVarIDdv1Edge - integer :: wrVarIDdv2Edge - integer :: wrVarIDdcEdge - integer :: wrVarIDangleEdge - integer :: wrVarIDareaCell - integer :: wrVarIDareaTriangle - integer :: wrVarIDcellsOnCell - integer :: wrVarIDverticesOnCell - integer :: wrVarIDverticesOnEdge - integer :: wrVarIDedgesOnVertex - integer :: wrVarIDcellsOnVertex - integer :: wrVarIDkiteAreasOnVertex - integer :: wrVarIDfEdge - integer :: wrVarIDfVertex - integer :: wrVarIDh_s - integer :: wrVarIDu - integer :: wrVarIDv - 
integer :: wrVarIDh - integer :: wrVarIDvh - integer :: wrVarIDcirculation - integer :: wrVarIDvorticity - integer :: wrVarIDke - integer :: wrVarIDtracers - - integer :: wrLocalnCells - integer :: wrLocalnEdges - integer :: wrLocalnVertices - integer :: wrLocalmaxEdges - integer :: wrLocalnVertLevels - integer :: wrLocalnTracers - - contains - - subroutine write_netcdf_init( & - nCells, & - nEdges, & - nVertices, & - maxEdges, & - nVertLevels, & - nTracers & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: nCells - integer, intent(in) :: nEdges - integer, intent(in) :: nVertices - integer, intent(in) :: maxEdges - integer, intent(in) :: nVertLevels - integer, intent(in) :: nTracers - - integer :: nferr - integer, dimension(10) :: dimlist - real (kind=8) :: sphere_radius - character (len=16) :: on_a_sphere - - - wrLocalnCells = nCells - wrLocalnEdges = nEdges - wrLocalnVertices = nVertices - wrLocalmaxEdges = maxEdges - wrLocalnVertLevels = nVertLevels - wrLocalnTracers = nTracers - - on_a_sphere = 'YES ' - sphere_radius = 1.0 - - nferr = nf_create('grid.nc', IOR(NF_CLOBBER,NF_64BIT_OFFSET), wr_ncid) - - ! - ! Write Namlist information - ! - - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'on_a_sphere', 16, on_a_sphere) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'sphere_radius', NF_DOUBLE, 1, sphere_radius) - nferr = nf_put_att_int(wr_ncid, NF_GLOBAL, 'np', NF_INT, 1, np) - nferr = nf_put_att_int(wr_ncid, NF_GLOBAL, 'n_scvt_iterations', NF_INT, 1, n_scvt_iterations) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'eps', NF_DOUBLE, 1, eps) - if(l2_conv) then - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'Convergence',2,'L2') - elseif(inf_conv) then - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'Convergence',3,'INF') - else - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'Convergence',7,'MaxIter') - endif - - ! - ! Define dimensions - ! - nferr = nf_def_dim(wr_ncid, 'nCells', nCells, wrDimIDnCells) - nferr = nf_def_dim(wr_ncid, 'nEdges', nEdges, wrDimIDnEdges) - nferr = nf_def_dim(wr_ncid, 'nVertices', nVertices, wrDimIDnVertices) - nferr = nf_def_dim(wr_ncid, 'maxEdges', maxEdges, wrDimIDmaxEdges) - nferr = nf_def_dim(wr_ncid, 'maxEdges2', 2*maxEdges, wrDimIDmaxEdges2) - nferr = nf_def_dim(wr_ncid, 'TWO', 2, wrDimIDTWO) - nferr = nf_def_dim(wr_ncid, 'vertexDegree', 3, wrDimIDvertexDegree) - nferr = nf_def_dim(wr_ncid, 'nVertLevels', nVertLevels, wrDimIDnVertLevels) - nferr = nf_def_dim(wr_ncid, 'nTracers', nTracers, wrDimIDnTracers) - nferr = nf_def_dim(wr_ncid, 'Time', NF_UNLIMITED, wrDimIDTime) - - ! - ! Define variables - ! 
- dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'latCell', NF_DOUBLE, 1, dimlist, wrVarIDlatCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'lonCell', NF_DOUBLE, 1, dimlist, wrVarIDlonCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'meshDensity', NF_DOUBLE, 1, dimlist, wrVarIDmeshDensity) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'xCell', NF_DOUBLE, 1, dimlist, wrVarIDxCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'yCell', NF_DOUBLE, 1, dimlist, wrVarIDyCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'zCell', NF_DOUBLE, 1, dimlist, wrVarIDzCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'indexToCellID', NF_INT, 1, dimlist, wrVarIDindexToCellID) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'latEdge', NF_DOUBLE, 1, dimlist, wrVarIDlatEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'lonEdge', NF_DOUBLE, 1, dimlist, wrVarIDlonEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'xEdge', NF_DOUBLE, 1, dimlist, wrVarIDxEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'yEdge', NF_DOUBLE, 1, dimlist, wrVarIDyEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'zEdge', NF_DOUBLE, 1, dimlist, wrVarIDzEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'indexToEdgeID', NF_INT, 1, dimlist, wrVarIDindexToEdgeID) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'latVertex', NF_DOUBLE, 1, dimlist, wrVarIDlatVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'lonVertex', NF_DOUBLE, 1, dimlist, wrVarIDlonVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'xVertex', NF_DOUBLE, 1, dimlist, wrVarIDxVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'yVertex', NF_DOUBLE, 1, dimlist, wrVarIDyVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'zVertex', NF_DOUBLE, 1, dimlist, wrVarIDzVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'indexToVertexID', NF_INT, 1, dimlist, wrVarIDindexToVertexID) - dimlist( 1) = wrDimIDTWO - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'cellsOnEdge', NF_INT, 2, dimlist, wrVarIDcellsOnEdge) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'nEdgesOnCell', NF_INT, 1, dimlist, wrVarIDnEdgesOnCell) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'nEdgesOnEdge', NF_INT, 1, dimlist, wrVarIDnEdgesOnEdge) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'edgesOnCell', NF_INT, 2, dimlist, wrVarIDedgesOnCell) - dimlist( 1) = wrDimIDmaxEdges2 - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'edgesOnEdge', NF_INT, 2, dimlist, wrVarIDedgesOnEdge) - dimlist( 1) = wrDimIDmaxEdges2 - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'weightsOnEdge', NF_DOUBLE, 2, dimlist, wrVarIDweightsOnEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dvEdge', NF_DOUBLE, 1, dimlist, wrVarIDdvEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dv1Edge', NF_DOUBLE, 1, dimlist, wrVarIDdv1Edge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dv2Edge', NF_DOUBLE, 1, dimlist, wrVarIDdv2Edge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'dcEdge', NF_DOUBLE, 1, dimlist, wrVarIDdcEdge) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'angleEdge', NF_DOUBLE, 1, dimlist, wrVarIDangleEdge) - dimlist( 1) = wrDimIDnCells - nferr = 
nf_def_var(wr_ncid, 'areaCell', NF_DOUBLE, 1, dimlist, wrVarIDareaCell) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'areaTriangle', NF_DOUBLE, 1, dimlist, wrVarIDareaTriangle) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'cellsOnCell', NF_INT, 2, dimlist, wrVarIDcellsOnCell) - dimlist( 1) = wrDimIDmaxEdges - dimlist( 2) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'verticesOnCell', NF_INT, 2, dimlist, wrVarIDverticesOnCell) - dimlist( 1) = wrDimIDTWO - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'verticesOnEdge', NF_INT, 2, dimlist, wrVarIDverticesOnEdge) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'edgesOnVertex', NF_INT, 2, dimlist, wrVarIDedgesOnVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'cellsOnVertex', NF_INT, 2, dimlist, wrVarIDcellsOnVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'kiteAreasOnVertex', NF_DOUBLE, 2, dimlist, wrVarIDkiteAreasOnVertex) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'fEdge', NF_DOUBLE, 1, dimlist, wrVarIDfEdge) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'fVertex', NF_DOUBLE, 1, dimlist, wrVarIDfVertex) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'h_s', NF_DOUBLE, 1, dimlist, wrVarIDh_s) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'u', NF_DOUBLE, 3, dimlist, wrVarIDu) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'v', NF_DOUBLE, 3, dimlist, wrVarIDv) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'h', NF_DOUBLE, 3, dimlist, wrVarIDh) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'vh', NF_DOUBLE, 3, dimlist, wrVarIDvh) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'circulation', NF_DOUBLE, 3, dimlist, wrVarIDcirculation) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'vorticity', NF_DOUBLE, 3, dimlist, wrVarIDvorticity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'ke', NF_DOUBLE, 3, dimlist, wrVarIDke) - dimlist( 1) = wrDimIDnTracers - dimlist( 2) = wrDimIDnVertLevels - dimlist( 3) = wrDimIDnCells - dimlist( 4) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'tracers', NF_DOUBLE, 4, dimlist, wrVarIDtracers) - - nferr = nf_enddef(wr_ncid) - - end subroutine write_netcdf_init - - - subroutine write_netcdf_fields( & - time, & - latCell, & - lonCell, & - meshDensity, & - xCell, & - yCell, & - zCell, & - indexToCellID, & - latEdge, & - lonEdge, & - xEdge, & - yEdge, & - zEdge, & - indexToEdgeID, & - latVertex, & - lonVertex, & - xVertex, & - yVertex, & - zVertex, & - indexToVertexID, & - cellsOnEdge, & - nEdgesOnCell, & - nEdgesOnEdge, & - edgesOnCell, & - edgesOnEdge, & - weightsOnEdge, & - dvEdge, & - dv1Edge, & - dv2Edge, & - dcEdge, & - angleEdge, & - areaCell, & - areaTriangle, & - cellsOnCell, & - verticesOnCell, & - verticesOnEdge, & - edgesOnVertex, & - cellsOnVertex, & - kiteAreasOnVertex, & - fEdge, & - fVertex, & - 
h_s, & - u, & - v, & - h, & - vh, & - circulation, & - vorticity, & - ke, & - tracers & - ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: time - real (kind=RKIND), dimension(:), intent(in) :: latCell - real (kind=RKIND), dimension(:), intent(in) :: lonCell - real (kind=RKIND), dimension(:), intent(in) :: meshDensity - real (kind=RKIND), dimension(:), intent(in) :: xCell - real (kind=RKIND), dimension(:), intent(in) :: yCell - real (kind=RKIND), dimension(:), intent(in) :: zCell - integer, dimension(:), intent(in) :: indexToCellID - real (kind=RKIND), dimension(:), intent(in) :: latEdge - real (kind=RKIND), dimension(:), intent(in) :: lonEdge - real (kind=RKIND), dimension(:), intent(in) :: xEdge - real (kind=RKIND), dimension(:), intent(in) :: yEdge - real (kind=RKIND), dimension(:), intent(in) :: zEdge - integer, dimension(:), intent(in) :: indexToEdgeID - real (kind=RKIND), dimension(:), intent(in) :: latVertex - real (kind=RKIND), dimension(:), intent(in) :: lonVertex - real (kind=RKIND), dimension(:), intent(in) :: xVertex - real (kind=RKIND), dimension(:), intent(in) :: yVertex - real (kind=RKIND), dimension(:), intent(in) :: zVertex - integer, dimension(:), intent(in) :: indexToVertexID - integer, dimension(:,:), intent(in) :: cellsOnEdge - integer, dimension(:), intent(in) :: nEdgesOnCell - integer, dimension(:), intent(in) :: nEdgesOnEdge - integer, dimension(:,:), intent(in) :: edgesOnCell - integer, dimension(:,:), intent(in) :: edgesOnEdge - real (kind=RKIND), dimension(:,:), intent(in) :: weightsOnEdge - real (kind=RKIND), dimension(:), intent(in) :: dvEdge - real (kind=RKIND), dimension(:), intent(in) :: dv1Edge - real (kind=RKIND), dimension(:), intent(in) :: dv2Edge - real (kind=RKIND), dimension(:), intent(in) :: dcEdge - real (kind=RKIND), dimension(:), intent(in) :: angleEdge - real (kind=RKIND), dimension(:), intent(in) :: areaCell - real (kind=RKIND), dimension(:), intent(in) :: areaTriangle - integer, dimension(:,:), intent(in) :: cellsOnCell - integer, dimension(:,:), intent(in) :: verticesOnCell - integer, dimension(:,:), intent(in) :: verticesOnEdge - integer, dimension(:,:), intent(in) :: edgesOnVertex - integer, dimension(:,:), intent(in) :: cellsOnVertex - real (kind=RKIND), dimension(:,:), intent(in) :: kiteAreasOnVertex - real (kind=RKIND), dimension(:), intent(in) :: fEdge - real (kind=RKIND), dimension(:), intent(in) :: fVertex - real (kind=RKIND), dimension(:), intent(in) :: h_s - real (kind=RKIND), dimension(:,:,:), intent(in) :: u - real (kind=RKIND), dimension(:,:,:), intent(in) :: v - real (kind=RKIND), dimension(:,:,:), intent(in) :: h - real (kind=RKIND), dimension(:,:,:), intent(in) :: vh - real (kind=RKIND), dimension(:,:,:), intent(in) :: circulation - real (kind=RKIND), dimension(:,:,:), intent(in) :: vorticity - real (kind=RKIND), dimension(:,:,:), intent(in) :: ke - real (kind=RKIND), dimension(:,:,:,:), intent(in) :: tracers - - integer :: nferr - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - start1(1) = 1 - - start2(1) = 1 - start2(2) = 1 - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start4(1) = 1 - start4(2) = 1 - start4(3) = 1 - start4(4) = 1 - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatCell, start1, count1, latCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonCell, start1, count1, lonCell) 
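The write_netcdf_init/write_netcdf_fields pair above goes through the netCDF-3 Fortran interface, defining every dimension and variable by hand and writing each array with explicit start/count vectors; note that the Fortran dimension order (fastest-varying first) is the reverse of the C/Python order, so a Fortran (vertexDegree, nVertices) variable appears as ('nVertices', 'vertexDegree') below. A rough netCDF4-python sketch of the same kind of output for a small, assumed subset of the fields (illustrative only, not the deleted module's interface):

```python
import numpy as np
from netCDF4 import Dataset

def write_minimal_grid(filename, latCell, lonCell, cellsOnVertex):
    """Write a few MPAS-style grid fields to a 64-bit-offset netCDF-3 file."""
    nCells = latCell.shape[0]
    nVertices = cellsOnVertex.shape[0]
    with Dataset(filename, 'w', format='NETCDF3_64BIT_OFFSET') as nc:
        nc.on_a_sphere = 'YES'
        nc.sphere_radius = 1.0
        nc.createDimension('nCells', nCells)
        nc.createDimension('nVertices', nVertices)
        nc.createDimension('vertexDegree', 3)
        nc.createVariable('latCell', 'f8', ('nCells',))[:] = latCell
        nc.createVariable('lonCell', 'f8', ('nCells',))[:] = lonCell
        nc.createVariable('cellsOnVertex', 'i4',
                          ('nVertices', 'vertexDegree'))[:] = cellsOnVertex
```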
- - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDmeshDensity, start1, count1, meshDensity) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDxCell, start1, count1, xCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDyCell, start1, count1, yCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDzCell, start1, count1, zCell) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToCellID, start1, count1, indexToCellID) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatEdge, start1, count1, latEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonEdge, start1, count1, lonEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDxEdge, start1, count1, xEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDyEdge, start1, count1, yEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDzEdge, start1, count1, zEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToEdgeID, start1, count1, indexToEdgeID) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDlatVertex, start1, count1, latVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDlonVertex, start1, count1, lonVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDxVertex, start1, count1, xVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDyVertex, start1, count1, yVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDzVertex, start1, count1, zVertex) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDindexToVertexID, start1, count1, indexToVertexID) - - start2(2) = 1 - count2( 1) = 2 - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnEdge, start2, count2, cellsOnEdge) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDnEdgesOnCell, start1, count1, nEdgesOnCell) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDnEdgesOnEdge, start1, count1, nEdgesOnEdge) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnCell, start2, count2, edgesOnCell) - - start2(2) = 1 - count2( 1) = 2*wrLocalmaxEdges - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnEdge, start2, count2, edgesOnEdge) - - start2(2) = 1 - count2( 1) = 2*wrLocalmaxEdges - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDweightsOnEdge, start2, count2, weightsOnEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdvEdge, start1, count1, dvEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdv1Edge, start1, count1, dv1Edge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDdv2Edge, start1, count1, dv2Edge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, 
wrVarIDdcEdge, start1, count1, dcEdge) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDangleEdge, start1, count1, angleEdge) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDareaCell, start1, count1, areaCell) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDareaTriangle, start1, count1, areaTriangle) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnCell, start2, count2, cellsOnCell) - - start2(2) = 1 - count2( 1) = wrLocalmaxEdges - count2( 2) = wrLocalnCells - nferr = nf_put_vara_int(wr_ncid, wrVarIDverticesOnCell, start2, count2, verticesOnCell) - - start2(2) = 1 - count2( 1) = 2 - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDverticesOnEdge, start2, count2, verticesOnEdge) - - start2(2) = 1 - count2( 1) = 3 - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDedgesOnVertex, start2, count2, edgesOnVertex) - - start2(2) = 1 - count2( 1) = 3 - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnVertex, start2, count2, cellsOnVertex) - - start2(2) = 1 - count2( 1) = 3 - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDkiteAreasOnVertex, start2, count2, kiteAreasOnVertex) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDfEdge, start1, count1, fEdge) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDfVertex, start1, count1, fVertex) - - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDh_s, start1, count1, h_s) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDu, start3, count3, u) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDv, start3, count3, v) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDh, start3, count3, h) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDvh, start3, count3, vh) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnVertices - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDcirculation, start3, count3, circulation) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnVertices - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDvorticity, start3, count3, vorticity) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDke, start3, count3, ke) - - start4(4) = time - count4( 1) = wrLocalnTracers - count4( 2) = wrLocalnVertLevels - count4( 3) = wrLocalnCells - count4( 4) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDtracers, start4, count4, tracers) - - - end subroutine write_netcdf_fields - - - subroutine write_netcdf_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(wr_ncid) - - end subroutine write_netcdf_finalize - -end module write_netcdf - diff --git a/grid_gen/icosdiv/Makefile b/grid_gen/icosdiv/Makefile deleted file mode 100644 index 
6beafa2ff..000000000 --- a/grid_gen/icosdiv/Makefile +++ /dev/null @@ -1,18 +0,0 @@ -CC = g++ - -all: icosdiv - -icosdiv: icosdiv.o Point.o Triangle.o - $(CC) -o icosdiv icosdiv.o Point.o Triangle.o - -icosdiv.o: Point.o Triangle.o icosdiv.cc - $(CC) -c icosdiv.cc - -Point.o: Point.cc Point.h - $(CC) -c Point.cc - -Triangle.o: Triangle.cc Triangle.h - $(CC) -c Triangle.cc - -clean: - rm -f icosdiv.o Point.o Triangle.o icosdiv diff --git a/grid_gen/icosdiv/Point.cc b/grid_gen/icosdiv/Point.cc deleted file mode 100644 index 8a4f63575..000000000 --- a/grid_gen/icosdiv/Point.cc +++ /dev/null @@ -1,144 +0,0 @@ -#include "Point.h" - -Point::Point() -{ - x = 0.0; - y = 0.0; - z = 0.0; - num = 0; -} - - -Point::Point(double x, double y, double z) -{ - this->x = x; - this->y = y; - this->z = z; -} - - -Point::~Point() -{ - -} - - -void Point::setX(double x) -{ - this->x = x; -} - - -void Point::setY(double y) -{ - this->y = y; -} - - -void Point::setZ(double z) -{ - this->z = z; -} - - -void Point::setXYZ(double x, double y, double z) -{ - this->x = x; - this->y = y; - this->z = z; -} - - -void Point::setNum(int n) -{ - num = n; -} - - -double Point::getX() const -{ - return x; -} - - -double Point::getY() const -{ - return y; -} - - -double Point::getZ() const -{ - return z; -} - - -double Point::distance(Point& p) -{ - // Assume we're on the unit sphere - return acos(p.getX()*x + p.getY()*y + p.getZ()*z); -} - - -int Point::getNum() const -{ - return num; -} - - -void Point::normalize() -{ - double mag; - - mag = sqrt(x*x + y*y + z*z); - x = x / mag; - y = y / mag; - z = z / mag; -} - - -Point Point::operator+(Point p) -{ - Point retval; - - retval.x = x + p.x; - retval.y = y + p.y; - retval.z = z + p.z; - retval.num = num; - - return retval; -} - - -Point Point::operator-(Point p) -{ - Point retval; - - retval.x = x - p.x; - retval.y = y - p.y; - retval.z = z - p.z; - retval.num = num; - - return retval; -} - - -Point Point::operator*(double s) -{ - Point retval; - - retval.x = s * x; - retval.y = s * y; - retval.z = s * z; - retval.num = num; - - return retval; -} - - -ostream& operator<<(ostream& output, const Point& p) -{ - output << p.num << " : " << p.x << " " << p.y << " " << p.z; - // output << p.x << " " << p.y << " " << p.z; - return output; -} diff --git a/grid_gen/icosdiv/Point.h b/grid_gen/icosdiv/Point.h deleted file mode 100644 index 25a8ed5d8..000000000 --- a/grid_gen/icosdiv/Point.h +++ /dev/null @@ -1,33 +0,0 @@ -#ifndef _PointH -#define _PointH -#include -#include -using namespace std; - -class Point -{ - private: - double x, y, z; - int num; - public: - Point(); - Point(double x, double y, double z); - ~Point(); - void setX(double x); - void setY(double y); - void setZ(double z); - void setXYZ(double x, double y, double z); - void setNum(int n); - double getX() const; - double getY() const; - double getZ() const; - double distance(Point& p); - int getNum() const; - void normalize(); - Point operator+(Point p); - Point operator-(Point p); - Point operator*(double s); - friend ostream& operator<<(ostream& output, const Point& p); -}; - -#endif diff --git a/grid_gen/icosdiv/Triangle.cc b/grid_gen/icosdiv/Triangle.cc deleted file mode 100644 index 9e0503f57..000000000 --- a/grid_gen/icosdiv/Triangle.cc +++ /dev/null @@ -1,54 +0,0 @@ -#include "Triangle.h" - -/* - Point * points[3]; - public: - Triangle(); - Triangle(Point& a, Point& b, Point& c); - ~Triangle(); - void setPoint(Point& p, int n); - Point * getPoint(int n); - friend ostream& operator<<(ostream& output, const 
Triangle& t) -*/ - -Triangle::Triangle() -{ - points[0] = NULL; - points[1] = NULL; - points[2] = NULL; -} - - -Triangle::Triangle(Point& a, Point& b, Point& c) -{ - points[0] = &a; - points[1] = &b; - points[2] = &c; -} - - -Triangle::~Triangle() -{ - // Nothing to do... -} - - -void Triangle::setPoint(Point* p, int n) -{ - // assert(n >= 0 && n <= 2); - points[n] = p; -} - - -Point * Triangle::getPoint(int n) -{ - // assert(n >= 0 && n <= 2); - return points[n]; -} - - -ostream& operator<<(ostream& output, const Triangle& t) -{ - // output << "(" << p.x << ", " << p.y << ", " << p.z << ")"; - return output; -} diff --git a/grid_gen/icosdiv/Triangle.h b/grid_gen/icosdiv/Triangle.h deleted file mode 100644 index 7baf4b6e7..000000000 --- a/grid_gen/icosdiv/Triangle.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef _TriangleH -#define _TriangleH -#include -#include "Point.h" -using namespace std; - -class Triangle -{ - private: - Point * points[3]; - public: - Triangle(); - Triangle(Point& a, Point& b, Point& c); - ~Triangle(); - void setPoint(Point* p, int n); - Point * getPoint(int n); - friend ostream& operator<<(ostream& output, const Triangle& t); -}; -#endif diff --git a/grid_gen/icosdiv/icosdiv.cc b/grid_gen/icosdiv/icosdiv.cc deleted file mode 100644 index 3ae8847a6..000000000 --- a/grid_gen/icosdiv/icosdiv.cc +++ /dev/null @@ -1,259 +0,0 @@ -#include -#include -#include -#include -#include -#include "Point.h" -#include "Triangle.h" - -using namespace std; - -void add_point(set& points, Point* newpt, int& np) -{ - set::iterator ip; - - // If the point doesn't exist, we assign it the next highest number - // and add it to the set - ip = points.find(*newpt); - - if (ip == points.end()) { - newpt->setNum(np++); - points.insert(*newpt); - } - - // Otherwise, we want newpt to equal the existing point - else { - *newpt = *ip; - } -} - -inline bool operator<(Point const& lhs, Point const& rhs) -{ - if (lhs.getX() < rhs.getX()) - return true; - else if (lhs.getX() > rhs.getX()) - return false; - else - if (lhs.getY() < rhs.getY()) - return true; - else if (lhs.getY() > rhs.getY()) - return false; - else - if (lhs.getZ() < rhs.getZ()) - return true; - else - return false; -} - - -inline bool operator>(Point const& lhs, Point const& rhs) -{ - if (lhs.getX() > rhs.getX()) - return true; - else if (lhs.getX() > rhs.getX()) - return false; - else - if (lhs.getY() > rhs.getY()) - return true; - else if (lhs.getY() > rhs.getY()) - return false; - else - if (lhs.getZ() > rhs.getZ()) - return true; - else - return false; -} - - -inline bool operator==(Point const& lhs, Point const& rhs) -{ - if (lhs.getX() == rhs.getX() && - lhs.getY() == rhs.getY() && - lhs.getZ() == rhs.getZ()) - return true; - else - return false; -} - - -Point * great_circle_points(Point& p1, Point& p2, int n) -{ - double x1, x2, y1, y2, z1, z2; - double x, y, z; - double dtheta, dinc, dt, dx; - Point * pl; - int i; - - x1 = p1.getX(); y1 = p1.getY(); z1 = p1.getZ(); - x2 = p2.getX(); y2 = p2.getY(); z2 = p2.getZ(); - - // For unit sphere, distance is the same as arc angle - dtheta = p1.distance(p2); - dinc = dtheta / (double)(n-1); - - pl = new Point[n]; - - pl[0].setXYZ(x1, y1, z1); - pl[n-1].setXYZ(x2, y2, z2); - - // Fill in interior points - for(i=1; i points; - vector triangles; - set::iterator ip; - vector::iterator it; - - int div_factor = 76; - - // Read in 12 icosahedral vertices - fin.open("locs.dat",ifstream::in); - for(i=0; i<12; i++) { - fin >> x >> y >> z; - icos[i].setXYZ(x, y, z); - } - fin.close(); - - // Read in 
triangulation of icosahedral points - fin.open("tri.dat",ifstream::in); - for(i=0; i<20; i++) { - fin >> tri[i][0] >> tri[i][1] >> tri[i][2]; - } - fin.close(); - - np = 1; - - // In the code below, we actually know which points will be duplicated between - // the 20 large (icosahedral) triangles -- exactly those points along the perimeter - // of the triangle; so, we could take advantage of this information in the - // add_point() subroutine. - - - // Subdivide each triangle - for(k=0; k<20; k++) { - line = divide_triangle(icos[tri[k][0]-1], icos[tri[k][1]-1], icos[tri[k][2]-1], div_factor); - - // Get triangulation - for(i=1; isetPoint(p, 0); - p = new Point; *p = line[i-1][j]; add_point(points, p, np); t->setPoint(p, 1); - p = new Point; *p = line[i][j+1]; add_point(points, p, np); t->setPoint(p, 2); - triangles.push_back(*t); - -// cout << "Creating triangle from " << line[i-1][j].getNum() << " " << line[i-1][j+1].getNum() << " " << line[i][j+1].getNum() << endl; - t = new Triangle; - p = new Point; *p = line[i-1][j]; add_point(points, p, np); t->setPoint(p, 0); - p = new Point; *p = line[i-1][j+1]; add_point(points, p, np); t->setPoint(p, 1); - p = new Point; *p = line[i][j+1]; add_point(points, p, np); t->setPoint(p, 2); - triangles.push_back(*t); - } -// cout << "Creating triangle from " << line[i][i-1].getNum() << " " << line[i-1][i-1].getNum() << " " << line[i][i].getNum() <setPoint(p, 0); - p = new Point; *p = line[i-1][i-1]; add_point(points, p, np); t->setPoint(p, 1); - p = new Point; *p = line[i][i]; add_point(points, p, np); t->setPoint(p, 2); - triangles.push_back(*t); - } - p = NULL; t = NULL; - - for(j=0; jgetPoint(0)) << endl; - cout << *(it->getPoint(1)) << endl; - cout << *(it->getPoint(2)) << endl; - cout << *(it->getPoint(0)) << endl; - cout << endl; - cout << endl; - } -*/ - - triangles.clear(); - points.clear(); - - - return 0; -} diff --git a/grid_gen/icosdiv/locs.dat b/grid_gen/icosdiv/locs.dat deleted file mode 100644 index 1d284e890..000000000 --- a/grid_gen/icosdiv/locs.dat +++ /dev/null @@ -1,12 +0,0 @@ - 0.0000000000 0.5257311121 0.8506508084 - 0.0000000000 -0.5257311121 0.8506508084 - 0.0000000000 0.5257311121 -0.8506508084 - 0.0000000000 -0.5257311121 -0.8506508084 - 0.5257311121 0.8506508084 0.0000000000 - -0.5257311121 0.8506508084 0.0000000000 - 0.5257311121 -0.8506508084 0.0000000000 - -0.5257311121 -0.8506508084 0.0000000000 - 0.8506508084 0.0000000000 0.5257311121 - -0.8506508084 0.0000000000 0.5257311121 - 0.8506508084 0.0000000000 -0.5257311121 - -0.8506508084 0.0000000000 -0.5257311121 diff --git a/grid_gen/icosdiv/tri.dat b/grid_gen/icosdiv/tri.dat deleted file mode 100644 index b7c3621a3..000000000 --- a/grid_gen/icosdiv/tri.dat +++ /dev/null @@ -1,20 +0,0 @@ - 1 5 6 - 1 6 10 - 1 2 10 - 1 2 9 - 1 5 9 - 2 8 10 - 2 7 8 - 2 7 9 - 3 4 12 - 3 6 12 - 3 5 6 - 3 5 11 - 3 4 11 - 4 7 8 - 4 8 12 - 4 7 11 - 5 9 11 - 6 10 12 - 7 9 11 - 8 10 12 diff --git a/grid_gen/mesh_conversion_tools/mark_horns_for_culling.py b/grid_gen/mesh_conversion_tools/mark_horns_for_culling.py deleted file mode 100755 index 69caeccf7..000000000 --- a/grid_gen/mesh_conversion_tools/mark_horns_for_culling.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python -''' -This script identifies "horns" on a mesh (cells with two or fewer neighbors), -and marks them for culling. In some cores/configurations, these weakly-connected -cells can be dynamically inactive, and, therefore, undesirable to keep in a mesh. - -The method used will work on both planar and spherical meshes. 
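The test itself is a simple neighbor count, as the full (Python 2) script below shows. As a rough standalone sketch of that test (not part of the original tool), assuming a NetCDF mesh file named grid.nc with a cellsOnCell variable in which entries of zero or less mark missing neighbors:

import netCDF4

# Count valid neighbors per cell; cells with two or fewer are "horns".
with netCDF4.Dataset("grid.nc", "r") as ds:           # hypothetical file name
    cellsOnCell = ds.variables["cellsOnCell"][:]       # shape (nCells, maxEdges)
    nNeighbors = (cellsOnCell > 0).sum(axis=1)
    cullCell = (nNeighbors <= 2).astype("i4")           # 1 = mark for culling
    print(int(cullCell.sum()), "horn cells found")

As in the original, the threshold of two neighbors is only a convention and can be adjusted for a particular use case. The original script, which continues below, also merges the new mask into any existing cullCell field and records the command in the file's history attribute.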
-It adds the new masked cell to an existing 'cullCell' field if it exists, -otherwise it creates a new field. -''' - -import sys -import numpy as np -import netCDF4 -from optparse import OptionParser -from datetime import datetime - - -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)\n" -parser = OptionParser() -parser.description = __doc__ -parser.add_option("-f", "--file", dest="inputFile", help="Name of file to be processed.", default="grid.nc", metavar="FILENAME") -for option in parser.option_list: - if option.default != ("NO", "DEFAULT"): - option.help += (" " if option.help else "") + "[default: %default]" -options, args = parser.parse_args() - -print " File to be modified: " + options.inputFile - - -# Open file and get needed fields. -inputFile = netCDF4.Dataset(options.inputFile, 'r+') -nCells = len(inputFile.dimensions['nCells']) -cellsOnCell = inputFile.variables['cellsOnCell'][:] - -# Add the horn cells to existing mask if it exists -if 'cullCell' in inputFile.variables: - cullCell = inputFile.variables['cullCell'][:] -else: # otherwise make a new mask initialized empty - cullCell = np.zeros( (nCells,) ) # local variable - -nHorns = 0 -for i in range(nCells): - if (cellsOnCell[i,:] > 0).sum() <= 2: # NOTE: Can change this threshold, if needed for a particular use case. - cullCell[i] = 1 - nHorns += 1 - -# Write out the new field -if 'cullCell' in inputFile.variables: - cullCellVar = inputFile.variables['cullCell'] -else: - cullCellVar = inputFile.createVariable('cullCell', 'i', ('nCells',)) -cullCellVar[:] = cullCell - - -# Update history attribute of netCDF file -thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) -if hasattr(inputFile, 'history'): - newhist = '\n'.join([thiscommand, getattr(inputFile, 'history')]) -else: - newhist = thiscommand -setattr(inputFile, 'history', newhist ) - -inputFile.close() - -print '\n{} "horn" locations have been marked in the field cullCell.'.format(nHorns) -print "Remember to use MpasCellCuller.x to actually remove them!" diff --git a/grid_gen/periodic_general/DensityFunction.cxx b/grid_gen/periodic_general/DensityFunction.cxx deleted file mode 100644 index 60ff12565..000000000 --- a/grid_gen/periodic_general/DensityFunction.cxx +++ /dev/null @@ -1,255 +0,0 @@ -#include -#include -#include "DensityFunction.h" -#include "netcdf.h" -#include - -DensityFunction::DensityFunction(double X_PERIOD, double Y_PERIOD, int USE_DATA_DENSITY) -{ - - minX = minY = 0.0; - maxX = X_PERIOD; - maxY = Y_PERIOD; - use_data_density = USE_DATA_DENSITY; - - if (use_data_density == 1){ - read_density_netcdf(&xPosDG, &yPosDG, &densityDG, dxDG, dyDG); - - dxDG = xPosDG[1] - xPosDG[0]; - cout << " dx=" << dxDG <0) { - cout << "Error reading density.nc. Aborting." << endl; - exit(1); - } - - // Get needed dimensions - ncerr = nc_inq_dimid(ncid, "x", &x_dimID); - ncerr = nc_inq_dimlen(ncid, x_dimID, &temp); - x_dim = (int)temp; - ncerr = nc_inq_dimid(ncid, "y", &y_dimID); - ncerr = nc_inq_dimlen(ncid, y_dimID, &temp); - y_dim = (int)temp; - cout << " Got dimensions from file." 
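The data-density option above reads a density field defined on a regular x/y grid and samples it by interpolation. As a standalone Python sketch (an illustration under stated assumptions, not the original code) of the clamped bilinear lookup that DensityFunction::BilinearInterp below performs, with the field flattened row-major so that density[j*nx + i] lives at (x[i], y[j]):

import numpy as np

def bilinear_density(x, y, xg, yg, density):
    # density is flattened row-major: density[j * len(xg) + i] is the value
    # at (xg[i], yg[j]), matching the densityDG layout in the C++ code.
    nx, ny = len(xg), len(yg)
    dx, dy = xg[1] - xg[0], yg[1] - yg[0]
    # Clamp the containing cell to the grid interior, as the C++ code does.
    i = min(max(int((x - xg[0]) // dx), 0), nx - 2)
    j = min(max(int((y - yg[0]) // dy), 0), ny - 2)
    return (density[j * nx + i]             * (xg[i + 1] - x) * (yg[j + 1] - y)
            + density[(j + 1) * nx + i]     * (xg[i + 1] - x) * (y - yg[j])
            + density[j * nx + i + 1]       * (x - xg[i])     * (yg[j + 1] - y)
            + density[(j + 1) * nx + i + 1] * (x - xg[i])     * (y - yg[j])) / (dx * dy)

# A 2x2 grid with corner values 1..4: the centre of the cell averages them.
xg, yg = np.array([0.0, 1.0]), np.array([0.0, 1.0])
dens = np.array([1.0, 2.0, 3.0, 4.0])
print(bilinear_density(0.5, 0.5, xg, yg, dens))   # -> 2.5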
<= nxDG - 1) { - xpos = nxDG - 2; - } - ypos = (int) floor( (y - yPosDG[0]) / dyDG); - if (ypos < 0) { - ypos = 0; - } else if (ypos >= nyDG - 1) { - ypos = nyDG - 2; - } - - return densityDG[ypos * nxDG + xpos]; -} - - - -double DensityFunction::BilinearInterp(double x, double y) -{ -// Gives the value of the density function at x,y using Bilinear Interpolation - - int xpos, ypos; // the cells that the point falls in - double value; - - xpos = (int) floor( (x - xPosDG[0]) / dxDG); // floor should not be needed since c++ will truncate... - if (xpos < 0) { - xpos = 0; - } else if (xpos >= nxDG - 1) { - xpos = nxDG - 2; - } - ypos = (int) floor( (y - yPosDG[0]) / dyDG); - if (ypos < 0) { - ypos = 0; - } else if (ypos >= nyDG - 1) { - ypos = nyDG - 2; - } - - value = ( - densityDG[ypos * nxDG + xpos] * (xPosDG[xpos+1] - x) * (yPosDG[ypos+1] - y) + - densityDG[(ypos+1) * nxDG + xpos] * (xPosDG[xpos+1] - x) * (y - yPosDG[ypos]) + - densityDG[ypos * nxDG + xpos+1] * (x - xPosDG[xpos]) * (yPosDG[ypos+1] - y) + - densityDG[(ypos+1) * nxDG + xpos+1] * (x - xPosDG[xpos]) * (y - yPosDG[ypos]) - ) / (dxDG * dyDG); - - return value; -} - diff --git a/grid_gen/periodic_general/DensityFunction.h b/grid_gen/periodic_general/DensityFunction.h deleted file mode 100644 index 96972a563..000000000 --- a/grid_gen/periodic_general/DensityFunction.h +++ /dev/null @@ -1,30 +0,0 @@ -#ifndef _DensityFunctionH -#define _DensityFunctionH -#include "Point.h" - -using namespace std; - -class DensityFunction -{ - private: - double minX, maxX, minY, maxY; - double f(double x, double y); - double AnalyticDensityFunction(double x, double y); - double DataDensityFunction(double x, double y); - double *xPosDG, *yPosDG, *densityDG; // The x (1d), y (1d), and density (2d) values of the data density function (regular grid) - double dxDG, dyDG; // grid spacing on the regular data density grid - int nxDG, nyDG; // number of cells on regular data density grid - void read_density_netcdf(double **xPosDG, double **yPosDG, double **densityDG, int dxDG, int dyDG); - double UniformValue(double x, double y); - double BilinearInterp(double x, double y); - int use_data_density; - public: - DensityFunction(double X_PERIOD, double Y_PERIOD, int USE_DATA_DENSITY); - ~DensityFunction(); - double evaluate(Point& p); - Point * randomPoint(); - void randomPoint(Point& p); -}; - - -#endif diff --git a/grid_gen/periodic_general/Makefile b/grid_gen/periodic_general/Makefile deleted file mode 100644 index 463ba6d0e..000000000 --- a/grid_gen/periodic_general/Makefile +++ /dev/null @@ -1,28 +0,0 @@ -.SUFFIXES: .cxx .o - -OMP = -CXX = g++ -CXXFLAGS = -O3 $(OMP) -I$(NETCDF)/include -I/usr/include/ - -OBJS = Point.o Triangle.o DensityFunction.o PointSet.o - -all: libfortune.a - ( $(MAKE) periodic_general ) - ( $(MAKE) mkgrid ) - -libfortune.a: - ( cd fortune; $(MAKE) all "CC=$(CXX)" "CFLAGS=$(CXXFLAGS)") - ( ln -s fortune/libfortune.a . ) - -periodic_general: main.o $(OBJS) - $(CXX) -O3 $(OMP) -o periodic_general main.o $(OBJS) -L$(NETCDF)/lib -L. -lnetcdf -lfortune - -mkgrid: mkgrid.o $(OBJS) - $(CXX) -O3 $(OMP) -o mkgrid mkgrid.o $(OBJS) -L$(NETCDF)/lib -L. 
-lnetcdf -lfortune - -clean: - ( cd fortune; $(MAKE) clean ) - rm -f *.o periodic_general libfortune.a - -.cxx.o: - $(CXX) $(CXXFLAGS) -c $< -I./fortune diff --git a/grid_gen/periodic_general/Point.cxx b/grid_gen/periodic_general/Point.cxx deleted file mode 100644 index 9e462962b..000000000 --- a/grid_gen/periodic_general/Point.cxx +++ /dev/null @@ -1,157 +0,0 @@ -#include "Point.h" - -Point::Point() -{ - x = 0.0; - y = 0.0; - boundary_point = 0; - num = 0; -} - - -Point::Point(double x, double y, int boundary_point) -{ - this->x = x; - this->y = y; - this->boundary_point = boundary_point; -} - - -Point::~Point() -{ - -} - - -void Point::setX(double x) -{ - this->x = x; -} - - -void Point::setY(double y) -{ - this->y = y; -} - - -void Point::setXY(double x, double y) -{ - this->x = x; - this->y = y; -} - - -void Point::setBoundaryPoint(int boundary_point) -{ - this->boundary_point = boundary_point; -} - - -void Point::setNum(int n) -{ - num = n; -} - - -double Point::getX() const -{ - return x; -} - - -double Point::getY() const -{ - return y; -} - - -double Point::distance(Point& p) -{ - double xd, yd; - - xd = p.getX() - x; - yd = p.getY() - y; - return sqrt(xd*xd + yd*yd); -} - - -int Point::isBoundaryPoint() const -{ - return boundary_point; -} - - -int Point::getNum() const -{ - return num; -} - - -Point Point::operator+(Point p) -{ - Point retval; - - retval.x = x + p.x; - retval.y = y + p.y; - retval.boundary_point = boundary_point; - retval.num = num; - - return retval; -} - - -Point Point::operator-(Point p) -{ - Point retval; - - retval.x = x - p.x; - retval.y = y - p.y; - retval.boundary_point = boundary_point; - retval.num = num; - - return retval; -} - - -Point Point::operator*(double s) -{ - Point retval; - - retval.x = s * x; - retval.y = s * y; - retval.boundary_point = boundary_point; - retval.num = num; - - return retval; -} - - -ostream& operator<<(ostream& output, const Point& p) -{ - output << p.x << " " << p.y; - return output; -} - - -bool operator<(Point const& lhs, Point const& rhs) -{ - double a[2], b[2]; - - a[0] = lhs.x; - a[1] = lhs.y; - - b[0] = rhs.x; - b[1] = rhs.y; - - if (a[0] < b[0]) { - return true; - } - else if (a[0] == b[0]) { - if (a[1] < b[1]) { - return true; - } - } - - return false; -} diff --git a/grid_gen/periodic_general/Point.h b/grid_gen/periodic_general/Point.h deleted file mode 100644 index 948cfa9e5..000000000 --- a/grid_gen/periodic_general/Point.h +++ /dev/null @@ -1,33 +0,0 @@ -#ifndef _PointH -#define _PointH -#include -#include -using namespace std; - -class Point -{ - private: - double x, y; - int boundary_point; - int num; - public: - Point(); - Point(double x, double y, int boundary_point); - ~Point(); - void setX(double x); - void setY(double y); - void setXY(double x, double y); - void setBoundaryPoint(int boundary_point); - void setNum(int n); - double getX() const; - double getY() const; - double distance(Point& p); - int isBoundaryPoint() const; - int getNum() const; - Point operator+(Point p); - Point operator-(Point p); - Point operator*(double s); - friend ostream& operator<<(ostream& output, const Point& p); - friend bool operator<(Point const& lhs, Point const& rhs); -}; -#endif diff --git a/grid_gen/periodic_general/PointSet.cxx b/grid_gen/periodic_general/PointSet.cxx deleted file mode 100644 index c9aa85a58..000000000 --- a/grid_gen/periodic_general/PointSet.cxx +++ /dev/null @@ -1,576 +0,0 @@ -#include -#include -#include -#include -#include -#include "PointSet.h" -#include "DensityFunction.h" - -#define MIN(A,B) 
(B)<(A)?(B):(A) -#define MAX(A,B) (B)>(A)?(B):(A) - -void voronoi_main(PointSet *); - - -PointSet::PointSet() -{ - nPoints = 0; -} - - -PointSet::~PointSet() -{ - -} - - -void PointSet::makeMCPoints(int n, double X_PERIOD, double Y_PERIOD, int USE_DATA_DENSITY) -{ - //Create Monte Carlo random point set - int i; - srand(2); // set the seed for reproducibility - double x, y; - Point * p; - DensityFunction density(X_PERIOD, Y_PERIOD, USE_DATA_DENSITY); - - for(i = 0; i < n; i++){ - p = density.randomPoint(); - - p->setNum(nPoints); - nPoints++; - points.push_back(p); - - } -} - - -int PointSet::initFromTextFile(double X_PERIOD, double Y_PERIOD, const char * filename) -{ - ifstream fin(filename); - double xloc, yloc; - Point * p; - ifstream new_edges("new_edges"); - - assert(fin.is_open()); - - fin >> xloc >> yloc; - do { - p = new Point(xloc, yloc, 0); - p->setNum(nPoints); - nPoints++; - points.push_back(p); - fin >> xloc >> yloc; - } while (!fin.eof()); -} - - -void PointSet::print() -{ - vector::iterator it; - - cout << "We have " << nPoints << " points" << endl; - - for (it = points.begin(); it != points.end(); it++) { - cout << **it << endl; - } -} - - -void PointSet::printToTextFile(const char * filename) -{ - ofstream fout(filename); - vector::iterator it; - - assert(fout.is_open()); - - for (it = points.begin(); it != points.end(); it++) { - fout << (*it)->getX() << " " << (*it)->getY() << " " << (*it)->isBoundaryPoint() << endl; - } -} - - -void PointSet::addPoint(double x, double y, int boundary_point) -{ - Point * p = new Point(x, y, boundary_point); - p->setNum(nPoints); - nPoints++; - - points.push_back(p); -} - - -void PointSet::addPoint(Point& p) -{ - Point * pp = new Point(p); - nPoints++; - - points.push_back(pp); -} - - -int PointSet::size() -{ - return nPoints; -} - - -vector* PointSet::getTriangulation() -{ - triangulation = new vector; - - voronoi_main(this); - - return triangulation; -} - - -vector * PointSet::getVoronoiDiagram() -{ - vector * t; - vector::iterator it; - vector * voronoiCorners = new vector[nPoints]; - Point p; - int i, n; - int nobtuse; - - double PI = 2.0 * acos(0.0); - - // 1) Get a triangulation - t = PointSet::getTriangulation(); - - // 2) For each triangle, compute the associated Voronoi point - // Add this point to the list of Voronoi corner for each of the triangle's vertices - nobtuse = 0; - for (it = triangulation->begin(); it != triangulation->end(); it++) { - if (fabs(angle(it->getVertex(0), it->getVertex(1), it->getVertex(2))) > PI/2.0) nobtuse++; - if (fabs(angle(it->getVertex(1), it->getVertex(2), it->getVertex(0))) > PI/2.0) nobtuse++; - if (fabs(angle(it->getVertex(2), it->getVertex(0), it->getVertex(1))) > PI/2.0) nobtuse++; - p = it->circumcenter(); - for (i=0; i<3; i++) { - n = it->getVertex(i).getNum(); - //assert(n >= 0 && n < nPoints); - voronoiCorners[n].push_back(p); - } - } - -cout << nobtuse << " obtuse angles\n"; - - delete t; - - // 3) For each point, order its list of Voronoi corners in ccw order - for (i=0; i * PointSet::getDelaunayAdjacency() -{ - vector * t; - vector::iterator it; - vector * adjacencyList = new vector[nPoints]; - Point p0, p1, p2; - int i, j, found, n0, n1, n2; - - t = PointSet::getTriangulation(); - - for (it = triangulation->begin(); it != triangulation->end(); it++) { - p0 = it->getVertex(0); - p1 = it->getVertex(1); - p2 = it->getVertex(2); - - n0 = p0.getNum(); - n1 = p1.getNum(); - n2 = p2.getNum(); - - found = 0; - for(j=0; j::iterator it; - - x = p.getX(); - y = p.getY(); - - minD = 1.e20; - for 
(it = points.begin(), idx=0; it != points.end(); it++, idx++) { - d = pow((*it)->getX() - x, 2.0) + pow((*it)->getY() - y, 2.0); - if (d < minD) {minD = d; minIdx = idx;} - } - - return minIdx; -} - - -Point* PointSet::operator[](int i) -{ - assert(i >= 0 && i < nPoints); - return points[i]; -} - - -double angle(Point o, Point p1, Point p2) -{ - double P1x, P1y, mP1; - double P2x, P2y, mP2; - double cos_angle; - - P1x = p1.getX() - o.getX(); - P1y = p1.getY() - o.getY(); - - mP1 = sqrt(P1x*P1x + P1y*P1y); - - P2x = p2.getX() - o.getX(); - P2y = p2.getY() - o.getY(); - - mP2 = sqrt(P2x*P2x + P2y*P2y); - - cos_angle = (P1x*P2x + P1y*P2y) / (mP1 * mP2); - - if (((P1x * P2y) - (P1y * P2x)) >= 0.0) - return acos(MAX(MIN(cos_angle,1.0),-1.0)); - else - return -acos(MAX(MIN(cos_angle,1.0),-1.0)); - - return 1.0; -} - - -void orderCCW(vector& vc, Point p) -{ - int i, j; - int vsize; - double * angles; - double ftemp; - Point ptemp; - - double PI = 2.0 * acos(0.0); - - vsize = vc.size(); - angles = new double[vsize]; - - angles[0] = 0.0; - for (i=1; i& vc, Point p, double x_period, double y_period) -{ - int i, j; - int vsize; - double * angles; - double ftemp; - Point ptemp; - - double PI = 2.0 * acos(0.0); - - vsize = vc.size(); - angles = new double[vsize]; - - for (i=0; i (x_period / 2.0) ) { - vc[i].setX( vc[i].getX() - x_period ); - } - else if ( (vc[i].getX() - p.getX()) < (-x_period / 2.0) ) { - vc[i].setX( vc[i].getX() + x_period ); - } - - if ( (vc[i].getY() - p.getY()) > (y_period / 2.0) ) { - vc[i].setY( vc[i].getY() - y_period ); - } - else if ( (vc[i].getY() - p.getY()) < (-y_period / 2.0) ) { - vc[i].setY( vc[i].getY() + y_period ); - } - } - - angles[0] = 0.0; - for (i=1; i& vc1, vector& vc2, Point p, double x_period, double y_period) -{ - int i, j; - double * angles; - double ftemp; - Point ptemp; - - double PI = 2.0 * acos(0.0); - - if (vc1.size() != vc2.size()) { - cerr << "Error: In orderCCW_normalize2, input vectors have different size." 
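getVoronoiDiagram() above builds the Voronoi cells in three steps: triangulate the points, take the circumcenter of every Delaunay triangle as a Voronoi corner of each of its three vertices, and sort each point's corners counter-clockwise. A compact planar Python sketch of those steps (an illustration only; it omits the periodic wrap-around that the orderCCW_normalize* routines handle):

import numpy as np

def circumcenter(a, b, c):
    # Circumcenter of a planar triangle (standard closed-form expression).
    ax, ay = a; bx, by = b; cx, cy = c
    d = 2.0 * (ax * (by - cy) + bx * (cy - ay) + cx * (ay - by))
    ux = ((ax**2 + ay**2) * (by - cy) + (bx**2 + by**2) * (cy - ay)
          + (cx**2 + cy**2) * (ay - by)) / d
    uy = ((ax**2 + ay**2) * (cx - bx) + (bx**2 + by**2) * (ax - cx)
          + (cx**2 + cy**2) * (bx - ax)) / d
    return np.array([ux, uy])

def voronoi_corners(points, triangles):
    # points: list of (x, y); triangles: list of (i, j, k) vertex indices.
    corners = [[] for _ in points]
    for tri in triangles:
        cc = circumcenter(*(points[i] for i in tri))
        for i in tri:
            corners[i].append(cc)            # each circumcenter is a Voronoi corner
    ordered = []
    for p, vc in zip(points, corners):
        vc = np.array(vc)
        ang = np.arctan2(vc[:, 1] - p[1], vc[:, 0] - p[0])
        ordered.append(vc[np.argsort(ang)])  # counter-clockwise around the cell point
    return ordered

# One square split into two triangles: both circumcenters coincide at (0.5, 0.5).
pts = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
tris = [(0, 1, 2), (0, 2, 3)]
print(voronoi_corners(pts, tris)[0])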
<< endl; - return; - } - - angles = new double[vc1.size()]; - - - /* Normalize points in vc1 */ - for (i=0; i (x_period / 2.0) ) { - vc1[i].setX( vc1[i].getX() - x_period ); - } - else if ( (vc1[i].getX() - p.getX()) < (-x_period / 2.0) ) { - vc1[i].setX( vc1[i].getX() + x_period ); - } - - if ( (vc1[i].getY() - p.getY()) > (y_period / 2.0) ) { - vc1[i].setY( vc1[i].getY() - y_period ); - } - else if ( (vc1[i].getY() - p.getY()) < (-y_period / 2.0) ) { - vc1[i].setY( vc1[i].getY() + y_period ); - } - } - - - /* Normalize points in vc2 */ - for (i=0; i (x_period / 2.0) ) { - vc1[i].setX( vc1[i].getX() - x_period ); - } - else if ( (vc1[i].getX() - p.getX()) < (-x_period / 2.0) ) { - vc1[i].setX( vc1[i].getX() + x_period ); - } - - if ( (vc1[i].getY() - p.getY()) > (y_period / 2.0) ) { - vc1[i].setY( vc1[i].getY() - y_period ); - } - else if ( (vc1[i].getY() - p.getY()) < (-y_period / 2.0) ) { - vc1[i].setY( vc1[i].getY() + y_period ); - } - } - - - /* Order points in vc1 */ - angles[0] = 0.0; - for (i=1; i& vc, Point p) -{ - int i, j; - int vsize; - double * angles; - double ftemp; - Point ptemp; - - double PI = 2.0 * acos(0.0); - - vsize = vc.size(); - angles = new double[vsize]; - - angles[0] = 0.0; - for (i=1; i points; - vector * triangulation; - - public: - PointSet(); - ~PointSet(); - int initFromTextFile(double X_PERIOD, double Y_PERIOD, const char *); - void makeMCPoints(int n, double X_PERIOD, double Y_PERIOD, int USE_DATA_DENSITY); - void print(); - void printToTextFile(const char *); - void addPoint(double x, double y, int boundary_point); - void addPoint(Point& p); - int size(); - vector* getTriangulation(); - vector * getVoronoiDiagram(); - vector * getDelaunayAdjacency(); - int nearestPoint(Point& p); - Point* operator[](int i); - friend void readsites(PointSet * p); - friend void out_triple(PointSet * p, Site * s1, Site * s2, Site * s3); -}; - -double angle(Point o, Point p1, Point p2); -void orderCCW(vector& vc, Point p); -void orderCCW_normalize(vector& vc, Point p, double x_period, double y_period); -void orderCCW_normalize2(vector& vc1, vector& vc2, Point p, double x_period, double y_period); -double poly_area(vector& vc); -void orderCCW_print(vector& vc, Point p); -void periodic_normalize(vector& vc, double x_period, double y_period); -#endif diff --git a/grid_gen/periodic_general/Triangle.cxx b/grid_gen/periodic_general/Triangle.cxx deleted file mode 100644 index ddf41da94..000000000 --- a/grid_gen/periodic_general/Triangle.cxx +++ /dev/null @@ -1,351 +0,0 @@ -#include -#include -#include "Triangle.h" - -Triangle::Triangle() -{ - points[0] = Point(0.0, 0.0, 0); - points[1] = Point(0.0, 0.0, 0); - points[2] = Point(0.0, 0.0, 0); -} - -Triangle::Triangle(Point a, Point b, Point c) -{ - points[0] = a; - points[1] = b; - points[2] = c; -} - - -Triangle::~Triangle() -{ - -} - - -void Triangle::setVertex(int i, Point p) -{ - assert(i >= 0 && i <= 2); - points[i] = p; -} - - -Point Triangle::getVertex(int i) const -{ - assert(i >= 0 && i <= 2); - return points[i]; -} - - -double Triangle::area() -{ - double a, b, c, s, R; - - // Compute side lengths - a = sqrt(pow(points[0].getX() - points[1].getX(),2.0) + pow(points[0].getY() - points[1].getY(),2.0)); - b = sqrt(pow(points[1].getX() - points[2].getX(),2.0) + pow(points[1].getY() - points[2].getY(),2.0)); - c = sqrt(pow(points[0].getX() - points[2].getX(),2.0) + pow(points[0].getY() - points[2].getY(),2.0)); - - // Compute semiperimiter - s = (a + b + c) / 2.0; - - // Compute area - return sqrt(s*(a + b - s)*(a + c - s)*(b + c 
- s)); -} - - -Point Triangle::centroid() -{ - Point p; - - p.setX((points[0].getX() + points[1].getX() + points[2].getX()) * 0.33333333); - p.setY((points[0].getY() + points[1].getY() + points[2].getY()) * 0.33333333); - - return p; -} - - -void Triangle::divide_segment(Point p1, Point p2, Point list[], int n) -{ - int i; - Point vec; - - list[0] = p1; - list[n-1] = p2; - - vec.setXY(p2.getX() - p1.getX(), p2.getY() - p1.getY()); - - for(i=1; i=2 - int numTri = pow(GLEV - 1, 2); // Number of triangles created by subdividing based on GLEV - int i, j, k; - double density, total_weight; - Point o, c; - Point line[GLEV][GLEV]; - Point p1p2[GLEV]; - Point p1p3[GLEV]; - Point p[numTri][3]; - Triangle t(o,o,o); // Initially, we don't care what t is - - divide_segment(points[0], points[1], p1p2, GLEV); - divide_segment(points[0], points[2], p1p3, GLEV); - - line[0][0] = points[0]; - line[1][0] = p1p2[1]; - line[1][1] = p1p3[1]; - - for (i=2; i=2; i--) { - for(j=1; j xmax) - ngreater++; - } - - /* - * If at least two corners of the triangle are below the lower bound, shift all corners - */ - if (nless > 1) { - for(int j=0; j<3; j++) { - x = points[j].getX(); - points[j].setX(x + xmax - xmin); - } - } - /* - * Else if at least two corners of the triangle are more the upper bound, shift all corners - */ - else if (ngreater > 1) { - for(int j=0; j<3; j++) { - x = points[j].getX(); - points[j].setX(x - xmax + xmin); - } - } - - - /* - * Scan through y-coordinates and count the number that are at most the lower bound - * and that are greater than the upper bound - */ - nless = ngreater = 0; - for(int j=0; j<3; j++) { - y = points[j].getY(); - - if (y <= ymin) - nless++; - else if (y > ymax) - ngreater++; - } - - /* - * If at least two corners of the triangle are below the lower bound, shift all corners - */ - if (nless > 1) { - for(int j=0; j<3; j++) { - y = points[j].getY(); - points[j].setY(y + ymax - ymin); - } - } - /* - * Else if at least two corners of the triangle are more the upper bound, shift all corners - */ - else if (ngreater > 1) { - for(int j=0; j<3; j++) { - y = points[j].getY(); - points[j].setY(y - ymax + ymin); - } - } -} - - -double Triangle::det(double m[3][3]) -{ - return m[0][0] * (m[1][1]*m[2][2] - m[1][2]*m[2][1]) - m[0][1] * (m[1][0]*m[2][2] - m[1][2]*m[2][0]) + m[0][2] * (m[1][0]*m[2][1] - m[1][1]*m[2][0]); -} - - -bool operator==(Triangle& lhs, Triangle& rhs) -{ - int a[3], b[3]; - int j; - - a[0] = lhs.getVertex(0).getNum(); - a[1] = lhs.getVertex(1).getNum(); - a[2] = lhs.getVertex(2).getNum(); - - if (a[1] < a[0]) { j = a[1]; a[1] = a[0]; a[0] = j; } - if (a[2] < a[0]) { j = a[2]; a[2] = a[0]; a[0] = j; } - if (a[2] < a[1]) { j = a[2]; a[2] = a[1]; a[1] = j; } - - b[0] = rhs.getVertex(0).getNum(); - b[1] = rhs.getVertex(1).getNum(); - b[2] = rhs.getVertex(2).getNum(); - - if (b[1] < b[0]) { j = b[1]; b[1] = b[0]; b[0] = j; } - if (b[2] < b[0]) { j = b[2]; b[2] = b[0]; b[0] = j; } - if (b[2] < b[1]) { j = b[2]; b[2] = b[1]; b[1] = j; } - - if (a[0] == b[0] && a[1] == b[1] && a[2] == b[2]) - return true; - - return false; -} - - -bool operator<(Triangle const& lhs, Triangle const& rhs) -{ - int a[3], b[3]; - int j; - - a[0] = lhs.points[0].getNum(); - a[1] = lhs.points[1].getNum(); - a[2] = lhs.points[2].getNum(); - - if (a[1] < a[0]) { j = a[1]; a[1] = a[0]; a[0] = j; } - if (a[2] < a[0]) { j = a[2]; a[2] = a[0]; a[0] = j; } - if (a[2] < a[1]) { j = a[2]; a[2] = a[1]; a[1] = j; } - - b[0] = rhs.points[0].getNum(); - b[1] = rhs.points[1].getNum(); - b[2] = 
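The orderCCW_normalize* routines above and Triangle::normalizeVertices here rely on the same periodic convention: before angles or areas are measured, each corner is shifted by a whole period so that it lies within half a period of the reference point. A minimal Python sketch of that wrap (an illustration of the convention, not the original code):

def wrap_near(value, ref, period):
    # Shift value by whole periods so it ends up within half a period of ref.
    return value - period * round((value - ref) / period)

# For example, with an x-period of 40, a corner at x = 39.5 seen from a cell
# centred at x = 0.5 is treated as lying just to the left of the cell:
print(wrap_near(39.5, 0.5, 40.0))   # -> -0.5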
rhs.points[2].getNum(); - - if (b[1] < b[0]) { j = b[1]; b[1] = b[0]; b[0] = j; } - if (b[2] < b[0]) { j = b[2]; b[2] = b[0]; b[0] = j; } - if (b[2] < b[1]) { j = b[2]; b[2] = b[1]; b[1] = j; } - - if (a[0] < b[0]) { - return true; - } - else if (a[0] == b[0]) { - if (a[1] < b[1]) { - return true; - } - else if (a[1] == b[1]) { - if (a[2] < b[2]) { - return true; - } - } - } - - return false; -} diff --git a/grid_gen/periodic_general/Triangle.h b/grid_gen/periodic_general/Triangle.h deleted file mode 100644 index 089bb3849..000000000 --- a/grid_gen/periodic_general/Triangle.h +++ /dev/null @@ -1,30 +0,0 @@ -#ifndef _TriangleH -#define _TriangleH -#include -#include "Point.h" -#include "DensityFunction.h" -using namespace std; - -class Triangle -{ - private: - Point points[3]; - double det(double m[3][3]); - void divide_segment(Point p1, Point p2, Point list[], int n); - - public: - Triangle(); - Triangle(Point a, Point b, Point c); - ~Triangle(); - void setVertex(int i, Point p); - Point getVertex(int i) const; - double area(); - Point centroid(); - Point centroid(DensityFunction& d, double * mass); - Point circumcenter(); - void normalizeVertices(double xmin, double xmax, double ymin, double ymax); - friend bool operator==(Triangle& lhs, Triangle& rhs); - friend bool operator<(Triangle const& lhs, Triangle const& rhs); -}; - -#endif diff --git a/grid_gen/periodic_general/cells_hex.ncl b/grid_gen/periodic_general/cells_hex.ncl deleted file mode 100644 index b1db3d299..000000000 --- a/grid_gen/periodic_general/cells_hex.ncl +++ /dev/null @@ -1,186 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" - -begin - - plotfield = "area" - level = 5 - winds = False - nrows = 100 - ncols = 100 - maxedges = 9 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"wh-bl-gr-ye-re") - - f = addfile("grid.nc","r") - - xCell = f->xCell(:) - yCell = f->yCell(:) - zCell = f->zCell(:) - xEdge = f->xEdge(:) - yEdge = f->yEdge(:) - zEdge = f->zEdge(:) - xVertex = f->xVertex(:) - yVertex = f->yVertex(:) - zVertex = f->zVertex(:) - verticesOnCell = f->verticesOnCell(:,:) - edgesOnCell = f->edgesOnCell(:,:) - edgesOnEdge = f->edgesOnEdge(:,:) - verticesOnEdge = f->verticesOnEdge(:,:) - cellsOnEdge = f->cellsOnEdge(:,:) - cellsOnVertex = f->cellsOnVertex(:,:) - edgesOnVertex = f->edgesOnVertex(:,:) - nEdgesOnCell = f->nEdgesOnCell(:) - - res = True - - t = stringtointeger(getenv("T")) - - xpoly = new((/maxedges/), "double") - ypoly = new((/maxedges/), "double") - - xcb = new((/4/), "float") - ycb = new((/4/), "float") - - pres = True - pres@gsnFrame = False - pres@xyLineColor = "Background" - plot = gsn_xy(wks,xCell,yCell,pres) - - if (plotfield .eq. "area") then - fld = f->areaCell(:) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. "w") then - fld = f->w(t,:,level) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. "t") then - fld = f->theta(t,:,level) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. "qr") then - fld = f->qr(t,:,level) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,0) - minfld = min(fld) - maxfld = max(fld) - end if - if (plotfield .eq. "vorticity") then - fld = f->vorticity(t,:,0) - minfld = min(fld) - maxfld = max(fld) - end if - scalefac = 198.0/(maxfld - minfld) - - if (plotfield .eq. 
"vorticity") then - do iRow=1,nrows-2 - do iCol=1,ncols-2 - iCell = iRow*ncols+iCol - do iVertex=2*iCell,2*iCell+1 - do i=0,2 - xpoly(i) = xCell(cellsOnVertex(iVertex,i)-1) - ypoly(i) = yCell(cellsOnVertex(iVertex,i)-1) - res@gsFillColor = doubletointeger((fld(iVertex)-minfld)*scalefac)+2 - end do - gsn_polygon(wks,plot,xpoly,ypoly,res); - end do - end do - end do - end if - - if (plotfield .eq. "area" .or. plotfield .eq. "ke" .or. plotfield .eq. "t" .or. plotfield .eq. "w" .or. plotfield .eq. "qr") then - do iRow=1,nrows-2 - do iCol=1,ncols-2 - iCell = iRow*ncols+iCol - do i=0,nEdgesOnCell(iCell)-1 - xpoly(i) = xVertex(verticesOnCell(iCell,i)-1) - ypoly(i) = yVertex(verticesOnCell(iCell,i)-1) - end do - do i=nEdgesOnCell(iCell),maxedges-1 - xpoly(i) = xpoly(0) - ypoly(i) = xpoly(0) - end do - gsn_polyline(wks,plot,xpoly(0:nEdgesOnCell(iCell)-1),ypoly(0:nEdgesOnCell(iCell)-1),res); - end do - end do - do iCell=10000,16083 - do i=0,nEdgesOnCell(iCell)-1 - xpoly(i) = xVertex(verticesOnCell(iCell,i)-1) - ypoly(i) = yVertex(verticesOnCell(iCell,i)-1) - end do - do i=nEdgesOnCell(iCell),maxedges-1 - xpoly(i) = xpoly(0) - ypoly(i) = xpoly(0) - end do - gsn_polyline(wks,plot,xpoly(0:nEdgesOnCell(iCell)-1),ypoly(0:nEdgesOnCell(iCell)-1),res); - end do - end if - - if (winds) then - u = 2.*f->u(t,:,level) - v = 2.*f->v(t,:,level) - alpha = f->angleEdge(:) - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - xwind = new(dimsizes(u),float) - ywind = new(dimsizes(u),float) - do i=0,esizes(0)-1 - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - xwind(i) = doubletofloat(xEdge(i)) - ywind(i) = doubletofloat(yEdge(i)) - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvect(wks, xwind, ywind, u_earth, v_earth) - end if - - ; - ; Draw label bar - ; - tres = True - tres@txAngleF = 90.0 - tres@txFontHeightF = 0.015 - do i=2,200 - xcb(0) = 0.1 + i*0.8/198 - ycb(0) = 0.1 - - xcb(1) = 0.1 + (i+1)*0.8/198 - ycb(1) = 0.1 - - xcb(2) = 0.1 + (i+1)*0.8/198 - ycb(2) = 0.15 - - xcb(3) = 0.1 + i*0.8/198 - ycb(3) = 0.15 - - res@gsFillColor = i - - gsn_polygon_ndc(wks,xcb,ycb,res); - - j = (i-2) % 20 - if ((j .eq. 0) .or. (i .eq. 
200)) then - ff = minfld + (i-2) / scalefac - label = sprintf("%7.3g", ff) - gsn_text_ndc(wks, label, xcb(0), 0.05, tres) - end if - - end do - - frame(wks) - -end - diff --git a/grid_gen/periodic_general/centroids.txt b/grid_gen/periodic_general/centroids.txt deleted file mode 100644 index 62e4e270b..000000000 --- a/grid_gen/periodic_general/centroids.txt +++ /dev/null @@ -1,1600 +0,0 @@ - 0.5 0.866025403784439 - 1.5 0.866025403784439 - 2.5 0.866025403784439 - 3.5 0.866025403784439 - 4.5 0.866025403784439 - 5.5 0.866025403784439 - 6.5 0.866025403784439 - 7.5 0.866025403784439 - 8.5 0.866025403784439 - 9.5 0.866025403784439 - 10.5 0.866025403784439 - 11.5 0.866025403784439 - 12.5 0.866025403784439 - 13.5 0.866025403784439 - 14.5 0.866025403784439 - 15.5 0.866025403784439 - 16.5 0.866025403784439 - 17.5 0.866025403784439 - 18.5 0.866025403784439 - 19.5 0.866025403784439 - 20.5 0.866025403784439 - 21.5 0.866025403784439 - 22.5 0.866025403784439 - 23.5 0.866025403784439 - 24.5 0.866025403784439 - 25.5 0.866025403784439 - 26.5 0.866025403784439 - 27.5 0.866025403784439 - 28.5 0.866025403784439 - 29.5 0.866025403784439 - 30.5 0.866025403784439 - 31.5 0.866025403784439 - 32.5 0.866025403784439 - 33.5 0.866025403784439 - 34.5 0.866025403784439 - 35.5 0.866025403784439 - 36.5 0.866025403784439 - 37.5 0.866025403784439 - 38.5 0.866025403784439 - 39.5 0.866025403784439 - 1 1.73205080756888 - 2 1.73205080756888 - 3 1.73205080756888 - 4 1.73205080756888 - 5 1.73205080756888 - 6 1.73205080756888 - 7 1.73205080756888 - 8 1.73205080756888 - 9 1.73205080756888 - 10 1.73205080756888 - 11 1.73205080756888 - 12 1.73205080756888 - 13 1.73205080756888 - 14 1.73205080756888 - 15 1.73205080756888 - 16 1.73205080756888 - 17 1.73205080756888 - 18 1.73205080756888 - 19 1.73205080756888 - 20 1.73205080756888 - 21 1.73205080756888 - 22 1.73205080756888 - 23 1.73205080756888 - 24 1.73205080756888 - 25 1.73205080756888 - 26 1.73205080756888 - 27 1.73205080756888 - 28 1.73205080756888 - 29 1.73205080756888 - 30 1.73205080756888 - 31 1.73205080756888 - 32 1.73205080756888 - 33 1.73205080756888 - 34 1.73205080756888 - 35 1.73205080756888 - 36 1.73205080756888 - 37 1.73205080756888 - 38 1.73205080756888 - 39 1.73205080756888 - 40 1.73205080756888 - 0.5 2.59807621135332 - 1.5 2.59807621135332 - 2.5 2.59807621135332 - 3.5 2.59807621135332 - 4.5 2.59807621135332 - 5.5 2.59807621135332 - 6.5 2.59807621135332 - 7.5 2.59807621135332 - 8.5 2.59807621135332 - 9.5 2.59807621135332 - 10.5 2.59807621135332 - 11.5 2.59807621135332 - 12.5 2.59807621135332 - 13.5 2.59807621135332 - 14.5 2.59807621135332 - 15.5 2.59807621135332 - 16.5 2.59807621135332 - 17.5 2.59807621135332 - 18.5 2.59807621135332 - 19.5 2.59807621135332 - 20.5 2.59807621135332 - 21.5 2.59807621135332 - 22.5 2.59807621135332 - 23.5 2.59807621135332 - 24.5 2.59807621135332 - 25.5 2.59807621135332 - 26.5 2.59807621135332 - 27.5 2.59807621135332 - 28.5 2.59807621135332 - 29.5 2.59807621135332 - 30.5 2.59807621135332 - 31.5 2.59807621135332 - 32.5 2.59807621135332 - 33.5 2.59807621135332 - 34.5 2.59807621135332 - 35.5 2.59807621135332 - 36.5 2.59807621135332 - 37.5 2.59807621135332 - 38.5 2.59807621135332 - 39.5 2.59807621135332 - 1 3.46410161513775 - 2 3.46410161513775 - 3 3.46410161513775 - 4 3.46410161513775 - 5 3.46410161513775 - 6 3.46410161513775 - 7 3.46410161513775 - 8 3.46410161513775 - 9 3.46410161513775 - 10 3.46410161513775 - 11 3.46410161513775 - 12 3.46410161513775 - 13 3.46410161513775 - 14 3.46410161513775 - 15 3.46410161513775 - 16 3.46410161513775 - 17 
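The deleted centroids.txt is a hexagonal lattice with unit spacing: 40 points per row, 1600 points in total, with row k at y = k*sqrt(3)/2 and alternate rows offset in x by half a cell. A short Python sketch (not the original generator) that reproduces the listed values:

import numpy as np

rows = cols = 40
dy = np.sqrt(3.0) / 2.0                    # vertical spacing between rows
centroids = []
for k in range(1, rows + 1):
    x0 = 0.5 if k % 2 == 1 else 1.0        # odd rows start at 0.5, even rows at 1.0
    centroids.extend((x0 + i, k * dy) for i in range(cols))
print(centroids[0], centroids[40], len(centroids))
# (0.5, 0.8660254037844386) (1.0, 1.7320508075688772) 1600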
3.46410161513775 - 18 3.46410161513775 - 19 3.46410161513775 - 20 3.46410161513775 - 21 3.46410161513775 - 22 3.46410161513775 - 23 3.46410161513775 - 24 3.46410161513775 - 25 3.46410161513775 - 26 3.46410161513775 - 27 3.46410161513775 - 28 3.46410161513775 - 29 3.46410161513775 - 30 3.46410161513775 - 31 3.46410161513775 - 32 3.46410161513775 - 33 3.46410161513775 - 34 3.46410161513775 - 35 3.46410161513775 - 36 3.46410161513775 - 37 3.46410161513775 - 38 3.46410161513775 - 39 3.46410161513775 - 40 3.46410161513775 - 0.5 4.33012701892219 - 1.5 4.33012701892219 - 2.5 4.33012701892219 - 3.5 4.33012701892219 - 4.5 4.33012701892219 - 5.5 4.33012701892219 - 6.5 4.33012701892219 - 7.5 4.33012701892219 - 8.5 4.33012701892219 - 9.5 4.33012701892219 - 10.5 4.33012701892219 - 11.5 4.33012701892219 - 12.5 4.33012701892219 - 13.5 4.33012701892219 - 14.5 4.33012701892219 - 15.5 4.33012701892219 - 16.5 4.33012701892219 - 17.5 4.33012701892219 - 18.5 4.33012701892219 - 19.5 4.33012701892219 - 20.5 4.33012701892219 - 21.5 4.33012701892219 - 22.5 4.33012701892219 - 23.5 4.33012701892219 - 24.5 4.33012701892219 - 25.5 4.33012701892219 - 26.5 4.33012701892219 - 27.5 4.33012701892219 - 28.5 4.33012701892219 - 29.5 4.33012701892219 - 30.5 4.33012701892219 - 31.5 4.33012701892219 - 32.5 4.33012701892219 - 33.5 4.33012701892219 - 34.5 4.33012701892219 - 35.5 4.33012701892219 - 36.5 4.33012701892219 - 37.5 4.33012701892219 - 38.5 4.33012701892219 - 39.5 4.33012701892219 - 1 5.19615242270663 - 2 5.19615242270663 - 3 5.19615242270663 - 4 5.19615242270663 - 5 5.19615242270663 - 6 5.19615242270663 - 7 5.19615242270663 - 8 5.19615242270663 - 9 5.19615242270663 - 10 5.19615242270663 - 11 5.19615242270663 - 12 5.19615242270663 - 13 5.19615242270663 - 14 5.19615242270663 - 15 5.19615242270663 - 16 5.19615242270663 - 17 5.19615242270663 - 18 5.19615242270663 - 19 5.19615242270663 - 20 5.19615242270663 - 21 5.19615242270663 - 22 5.19615242270663 - 23 5.19615242270663 - 24 5.19615242270663 - 25 5.19615242270663 - 26 5.19615242270663 - 27 5.19615242270663 - 28 5.19615242270663 - 29 5.19615242270663 - 30 5.19615242270663 - 31 5.19615242270663 - 32 5.19615242270663 - 33 5.19615242270663 - 34 5.19615242270663 - 35 5.19615242270663 - 36 5.19615242270663 - 37 5.19615242270663 - 38 5.19615242270663 - 39 5.19615242270663 - 40 5.19615242270663 - 0.5 6.06217782649107 - 1.5 6.06217782649107 - 2.5 6.06217782649107 - 3.5 6.06217782649107 - 4.5 6.06217782649107 - 5.5 6.06217782649107 - 6.5 6.06217782649107 - 7.5 6.06217782649107 - 8.5 6.06217782649107 - 9.5 6.06217782649107 - 10.5 6.06217782649107 - 11.5 6.06217782649107 - 12.5 6.06217782649107 - 13.5 6.06217782649107 - 14.5 6.06217782649107 - 15.5 6.06217782649107 - 16.5 6.06217782649107 - 17.5 6.06217782649107 - 18.5 6.06217782649107 - 19.5 6.06217782649107 - 20.5 6.06217782649107 - 21.5 6.06217782649107 - 22.5 6.06217782649107 - 23.5 6.06217782649107 - 24.5 6.06217782649107 - 25.5 6.06217782649107 - 26.5 6.06217782649107 - 27.5 6.06217782649107 - 28.5 6.06217782649107 - 29.5 6.06217782649107 - 30.5 6.06217782649107 - 31.5 6.06217782649107 - 32.5 6.06217782649107 - 33.5 6.06217782649107 - 34.5 6.06217782649107 - 35.5 6.06217782649107 - 36.5 6.06217782649107 - 37.5 6.06217782649107 - 38.5 6.06217782649107 - 39.5 6.06217782649107 - 1 6.92820323027551 - 2 6.92820323027551 - 3 6.92820323027551 - 4 6.92820323027551 - 5 6.92820323027551 - 6 6.92820323027551 - 7 6.92820323027551 - 8 6.92820323027551 - 9 6.92820323027551 - 10 6.92820323027551 - 11 6.92820323027551 - 12 6.92820323027551 - 13 
6.92820323027551 - 14 6.92820323027551 - 15 6.92820323027551 - 16 6.92820323027551 - 17 6.92820323027551 - 18 6.92820323027551 - 19 6.92820323027551 - 20 6.92820323027551 - 21 6.92820323027551 - 22 6.92820323027551 - 23 6.92820323027551 - 24 6.92820323027551 - 25 6.92820323027551 - 26 6.92820323027551 - 27 6.92820323027551 - 28 6.92820323027551 - 29 6.92820323027551 - 30 6.92820323027551 - 31 6.92820323027551 - 32 6.92820323027551 - 33 6.92820323027551 - 34 6.92820323027551 - 35 6.92820323027551 - 36 6.92820323027551 - 37 6.92820323027551 - 38 6.92820323027551 - 39 6.92820323027551 - 40 6.92820323027551 - 0.5 7.79422863405995 - 1.5 7.79422863405995 - 2.5 7.79422863405995 - 3.5 7.79422863405995 - 4.5 7.79422863405995 - 5.5 7.79422863405995 - 6.5 7.79422863405995 - 7.5 7.79422863405995 - 8.5 7.79422863405995 - 9.5 7.79422863405995 - 10.5 7.79422863405995 - 11.5 7.79422863405995 - 12.5 7.79422863405995 - 13.5 7.79422863405995 - 14.5 7.79422863405995 - 15.5 7.79422863405995 - 16.5 7.79422863405995 - 17.5 7.79422863405995 - 18.5 7.79422863405995 - 19.5 7.79422863405995 - 20.5 7.79422863405995 - 21.5 7.79422863405995 - 22.5 7.79422863405995 - 23.5 7.79422863405995 - 24.5 7.79422863405995 - 25.5 7.79422863405995 - 26.5 7.79422863405995 - 27.5 7.79422863405995 - 28.5 7.79422863405995 - 29.5 7.79422863405995 - 30.5 7.79422863405995 - 31.5 7.79422863405995 - 32.5 7.79422863405995 - 33.5 7.79422863405995 - 34.5 7.79422863405995 - 35.5 7.79422863405995 - 36.5 7.79422863405995 - 37.5 7.79422863405995 - 38.5 7.79422863405995 - 39.5 7.79422863405995 - 1 8.66025403784439 - 2 8.66025403784439 - 3 8.66025403784439 - 4 8.66025403784439 - 5 8.66025403784439 - 6 8.66025403784439 - 7 8.66025403784439 - 8 8.66025403784439 - 9 8.66025403784439 - 10 8.66025403784439 - 11 8.66025403784439 - 12 8.66025403784439 - 13 8.66025403784439 - 14 8.66025403784439 - 15 8.66025403784439 - 16 8.66025403784439 - 17 8.66025403784439 - 18 8.66025403784439 - 19 8.66025403784439 - 20 8.66025403784439 - 21 8.66025403784439 - 22 8.66025403784439 - 23 8.66025403784439 - 24 8.66025403784439 - 25 8.66025403784439 - 26 8.66025403784439 - 27 8.66025403784439 - 28 8.66025403784439 - 29 8.66025403784439 - 30 8.66025403784439 - 31 8.66025403784439 - 32 8.66025403784439 - 33 8.66025403784439 - 34 8.66025403784439 - 35 8.66025403784439 - 36 8.66025403784439 - 37 8.66025403784439 - 38 8.66025403784439 - 39 8.66025403784439 - 40 8.66025403784439 - 0.5 9.52627944162882 - 1.5 9.52627944162882 - 2.5 9.52627944162882 - 3.5 9.52627944162882 - 4.5 9.52627944162882 - 5.5 9.52627944162882 - 6.5 9.52627944162882 - 7.5 9.52627944162882 - 8.5 9.52627944162882 - 9.5 9.52627944162882 - 10.5 9.52627944162882 - 11.5 9.52627944162882 - 12.5 9.52627944162882 - 13.5 9.52627944162882 - 14.5 9.52627944162882 - 15.5 9.52627944162882 - 16.5 9.52627944162882 - 17.5 9.52627944162882 - 18.5 9.52627944162882 - 19.5 9.52627944162882 - 20.5 9.52627944162882 - 21.5 9.52627944162882 - 22.5 9.52627944162882 - 23.5 9.52627944162882 - 24.5 9.52627944162882 - 25.5 9.52627944162882 - 26.5 9.52627944162882 - 27.5 9.52627944162882 - 28.5 9.52627944162882 - 29.5 9.52627944162882 - 30.5 9.52627944162882 - 31.5 9.52627944162882 - 32.5 9.52627944162882 - 33.5 9.52627944162882 - 34.5 9.52627944162882 - 35.5 9.52627944162882 - 36.5 9.52627944162882 - 37.5 9.52627944162882 - 38.5 9.52627944162882 - 39.5 9.52627944162882 - 1 10.3923048454133 - 2 10.3923048454133 - 3 10.3923048454133 - 4 10.3923048454133 - 5 10.3923048454133 - 6 10.3923048454133 - 7 10.3923048454133 - 8 10.3923048454133 - 9 
10.3923048454133 - 10 10.3923048454133 - 11 10.3923048454133 - 12 10.3923048454133 - 13 10.3923048454133 - 14 10.3923048454133 - 15 10.3923048454133 - 16 10.3923048454133 - 17 10.3923048454133 - 18 10.3923048454133 - 19 10.3923048454133 - 20 10.3923048454133 - 21 10.3923048454133 - 22 10.3923048454133 - 23 10.3923048454133 - 24 10.3923048454133 - 25 10.3923048454133 - 26 10.3923048454133 - 27 10.3923048454133 - 28 10.3923048454133 - 29 10.3923048454133 - 30 10.3923048454133 - 31 10.3923048454133 - 32 10.3923048454133 - 33 10.3923048454133 - 34 10.3923048454133 - 35 10.3923048454133 - 36 10.3923048454133 - 37 10.3923048454133 - 38 10.3923048454133 - 39 10.3923048454133 - 40 10.3923048454133 - 0.5 11.2583302491977 - 1.5 11.2583302491977 - 2.5 11.2583302491977 - 3.5 11.2583302491977 - 4.5 11.2583302491977 - 5.5 11.2583302491977 - 6.5 11.2583302491977 - 7.5 11.2583302491977 - 8.5 11.2583302491977 - 9.5 11.2583302491977 - 10.5 11.2583302491977 - 11.5 11.2583302491977 - 12.5 11.2583302491977 - 13.5 11.2583302491977 - 14.5 11.2583302491977 - 15.5 11.2583302491977 - 16.5 11.2583302491977 - 17.5 11.2583302491977 - 18.5 11.2583302491977 - 19.5 11.2583302491977 - 20.5 11.2583302491977 - 21.5 11.2583302491977 - 22.5 11.2583302491977 - 23.5 11.2583302491977 - 24.5 11.2583302491977 - 25.5 11.2583302491977 - 26.5 11.2583302491977 - 27.5 11.2583302491977 - 28.5 11.2583302491977 - 29.5 11.2583302491977 - 30.5 11.2583302491977 - 31.5 11.2583302491977 - 32.5 11.2583302491977 - 33.5 11.2583302491977 - 34.5 11.2583302491977 - 35.5 11.2583302491977 - 36.5 11.2583302491977 - 37.5 11.2583302491977 - 38.5 11.2583302491977 - 39.5 11.2583302491977 - 1 12.1243556529821 - 2 12.1243556529821 - 3 12.1243556529821 - 4 12.1243556529821 - 5 12.1243556529821 - 6 12.1243556529821 - 7 12.1243556529821 - 8 12.1243556529821 - 9 12.1243556529821 - 10 12.1243556529821 - 11 12.1243556529821 - 12 12.1243556529821 - 13 12.1243556529821 - 14 12.1243556529821 - 15 12.1243556529821 - 16 12.1243556529821 - 17 12.1243556529821 - 18 12.1243556529821 - 19 12.1243556529821 - 20 12.1243556529821 - 21 12.1243556529821 - 22 12.1243556529821 - 23 12.1243556529821 - 24 12.1243556529821 - 25 12.1243556529821 - 26 12.1243556529821 - 27 12.1243556529821 - 28 12.1243556529821 - 29 12.1243556529821 - 30 12.1243556529821 - 31 12.1243556529821 - 32 12.1243556529821 - 33 12.1243556529821 - 34 12.1243556529821 - 35 12.1243556529821 - 36 12.1243556529821 - 37 12.1243556529821 - 38 12.1243556529821 - 39 12.1243556529821 - 40 12.1243556529821 - 0.5 12.9903810567666 - 1.5 12.9903810567666 - 2.5 12.9903810567666 - 3.5 12.9903810567666 - 4.5 12.9903810567666 - 5.5 12.9903810567666 - 6.5 12.9903810567666 - 7.5 12.9903810567666 - 8.5 12.9903810567666 - 9.5 12.9903810567666 - 10.5 12.9903810567666 - 11.5 12.9903810567666 - 12.5 12.9903810567666 - 13.5 12.9903810567666 - 14.5 12.9903810567666 - 15.5 12.9903810567666 - 16.5 12.9903810567666 - 17.5 12.9903810567666 - 18.5 12.9903810567666 - 19.5 12.9903810567666 - 20.5 12.9903810567666 - 21.5 12.9903810567666 - 22.5 12.9903810567666 - 23.5 12.9903810567666 - 24.5 12.9903810567666 - 25.5 12.9903810567666 - 26.5 12.9903810567666 - 27.5 12.9903810567666 - 28.5 12.9903810567666 - 29.5 12.9903810567666 - 30.5 12.9903810567666 - 31.5 12.9903810567666 - 32.5 12.9903810567666 - 33.5 12.9903810567666 - 34.5 12.9903810567666 - 35.5 12.9903810567666 - 36.5 12.9903810567666 - 37.5 12.9903810567666 - 38.5 12.9903810567666 - 39.5 12.9903810567666 - 1 13.856406460551 - 2 13.856406460551 - 3 13.856406460551 - 4 13.856406460551 - 5 
13.856406460551 - 6 13.856406460551 - 7 13.856406460551 - 8 13.856406460551 - 9 13.856406460551 - 10 13.856406460551 - 11 13.856406460551 - 12 13.856406460551 - 13 13.856406460551 - 14 13.856406460551 - 15 13.856406460551 - 16 13.856406460551 - 17 13.856406460551 - 18 13.856406460551 - 19 13.856406460551 - 20 13.856406460551 - 21 13.856406460551 - 22 13.856406460551 - 23 13.856406460551 - 24 13.856406460551 - 25 13.856406460551 - 26 13.856406460551 - 27 13.856406460551 - 28 13.856406460551 - 29 13.856406460551 - 30 13.856406460551 - 31 13.856406460551 - 32 13.856406460551 - 33 13.856406460551 - 34 13.856406460551 - 35 13.856406460551 - 36 13.856406460551 - 37 13.856406460551 - 38 13.856406460551 - 39 13.856406460551 - 40 13.856406460551 - 0.5 14.7224318643355 - 1.5 14.7224318643355 - 2.5 14.7224318643355 - 3.5 14.7224318643355 - 4.5 14.7224318643355 - 5.5 14.7224318643355 - 6.5 14.7224318643355 - 7.5 14.7224318643355 - 8.5 14.7224318643355 - 9.5 14.7224318643355 - 10.5 14.7224318643355 - 11.5 14.7224318643355 - 12.5 14.7224318643355 - 13.5 14.7224318643355 - 14.5 14.7224318643355 - 15.5 14.7224318643355 - 16.5 14.7224318643355 - 17.5 14.7224318643355 - 18.5 14.7224318643355 - 19.5 14.7224318643355 - 20.5 14.7224318643355 - 21.5 14.7224318643355 - 22.5 14.7224318643355 - 23.5 14.7224318643355 - 24.5 14.7224318643355 - 25.5 14.7224318643355 - 26.5 14.7224318643355 - 27.5 14.7224318643355 - 28.5 14.7224318643355 - 29.5 14.7224318643355 - 30.5 14.7224318643355 - 31.5 14.7224318643355 - 32.5 14.7224318643355 - 33.5 14.7224318643355 - 34.5 14.7224318643355 - 35.5 14.7224318643355 - 36.5 14.7224318643355 - 37.5 14.7224318643355 - 38.5 14.7224318643355 - 39.5 14.7224318643355 - 1 15.5884572681199 - 2 15.5884572681199 - 3 15.5884572681199 - 4 15.5884572681199 - 5 15.5884572681199 - 6 15.5884572681199 - 7 15.5884572681199 - 8 15.5884572681199 - 9 15.5884572681199 - 10 15.5884572681199 - 11 15.5884572681199 - 12 15.5884572681199 - 13 15.5884572681199 - 14 15.5884572681199 - 15 15.5884572681199 - 16 15.5884572681199 - 17 15.5884572681199 - 18 15.5884572681199 - 19 15.5884572681199 - 20 15.5884572681199 - 21 15.5884572681199 - 22 15.5884572681199 - 23 15.5884572681199 - 24 15.5884572681199 - 25 15.5884572681199 - 26 15.5884572681199 - 27 15.5884572681199 - 28 15.5884572681199 - 29 15.5884572681199 - 30 15.5884572681199 - 31 15.5884572681199 - 32 15.5884572681199 - 33 15.5884572681199 - 34 15.5884572681199 - 35 15.5884572681199 - 36 15.5884572681199 - 37 15.5884572681199 - 38 15.5884572681199 - 39 15.5884572681199 - 40 15.5884572681199 - 0.5 16.4544826719043 - 1.5 16.4544826719043 - 2.5 16.4544826719043 - 3.5 16.4544826719043 - 4.5 16.4544826719043 - 5.5 16.4544826719043 - 6.5 16.4544826719043 - 7.5 16.4544826719043 - 8.5 16.4544826719043 - 9.5 16.4544826719043 - 10.5 16.4544826719043 - 11.5 16.4544826719043 - 12.5 16.4544826719043 - 13.5 16.4544826719043 - 14.5 16.4544826719043 - 15.5 16.4544826719043 - 16.5 16.4544826719043 - 17.5 16.4544826719043 - 18.5 16.4544826719043 - 19.5 16.4544826719043 - 20.5 16.4544826719043 - 21.5 16.4544826719043 - 22.5 16.4544826719043 - 23.5 16.4544826719043 - 24.5 16.4544826719043 - 25.5 16.4544826719043 - 26.5 16.4544826719043 - 27.5 16.4544826719043 - 28.5 16.4544826719043 - 29.5 16.4544826719043 - 30.5 16.4544826719043 - 31.5 16.4544826719043 - 32.5 16.4544826719043 - 33.5 16.4544826719043 - 34.5 16.4544826719043 - 35.5 16.4544826719043 - 36.5 16.4544826719043 - 37.5 16.4544826719043 - 38.5 16.4544826719043 - 39.5 16.4544826719043 - 1 17.3205080756888 - 2 
17.3205080756888 - 3 17.3205080756888 - 4 17.3205080756888 - 5 17.3205080756888 - 6 17.3205080756888 - 7 17.3205080756888 - 8 17.3205080756888 - 9 17.3205080756888 - 10 17.3205080756888 - 11 17.3205080756888 - 12 17.3205080756888 - 13 17.3205080756888 - 14 17.3205080756888 - 15 17.3205080756888 - 16 17.3205080756888 - 17 17.3205080756888 - 18 17.3205080756888 - 19 17.3205080756888 - 20 17.3205080756888 - 21 17.3205080756888 - 22 17.3205080756888 - 23 17.3205080756888 - 24 17.3205080756888 - 25 17.3205080756888 - 26 17.3205080756888 - 27 17.3205080756888 - 28 17.3205080756888 - 29 17.3205080756888 - 30 17.3205080756888 - 31 17.3205080756888 - 32 17.3205080756888 - 33 17.3205080756888 - 34 17.3205080756888 - 35 17.3205080756888 - 36 17.3205080756888 - 37 17.3205080756888 - 38 17.3205080756888 - 39 17.3205080756888 - 40 17.3205080756888 - 0.5 18.1865334794732 - 1.5 18.1865334794732 - 2.5 18.1865334794732 - 3.5 18.1865334794732 - 4.5 18.1865334794732 - 5.5 18.1865334794732 - 6.5 18.1865334794732 - 7.5 18.1865334794732 - 8.5 18.1865334794732 - 9.5 18.1865334794732 - 10.5 18.1865334794732 - 11.5 18.1865334794732 - 12.5 18.1865334794732 - 13.5 18.1865334794732 - 14.5 18.1865334794732 - 15.5 18.1865334794732 - 16.5 18.1865334794732 - 17.5 18.1865334794732 - 18.5 18.1865334794732 - 19.5 18.1865334794732 - 20.5 18.1865334794732 - 21.5 18.1865334794732 - 22.5 18.1865334794732 - 23.5 18.1865334794732 - 24.5 18.1865334794732 - 25.5 18.1865334794732 - 26.5 18.1865334794732 - 27.5 18.1865334794732 - 28.5 18.1865334794732 - 29.5 18.1865334794732 - 30.5 18.1865334794732 - 31.5 18.1865334794732 - 32.5 18.1865334794732 - 33.5 18.1865334794732 - 34.5 18.1865334794732 - 35.5 18.1865334794732 - 36.5 18.1865334794732 - 37.5 18.1865334794732 - 38.5 18.1865334794732 - 39.5 18.1865334794732 - 1 19.0525588832576 - 2 19.0525588832576 - 3 19.0525588832576 - 4 19.0525588832576 - 5 19.0525588832576 - 6 19.0525588832576 - 7 19.0525588832576 - 8 19.0525588832576 - 9 19.0525588832576 - 10 19.0525588832576 - 11 19.0525588832576 - 12 19.0525588832576 - 13 19.0525588832576 - 14 19.0525588832576 - 15 19.0525588832576 - 16 19.0525588832576 - 17 19.0525588832576 - 18 19.0525588832576 - 19 19.0525588832576 - 20 19.0525588832576 - 21 19.0525588832576 - 22 19.0525588832576 - 23 19.0525588832576 - 24 19.0525588832576 - 25 19.0525588832576 - 26 19.0525588832576 - 27 19.0525588832576 - 28 19.0525588832576 - 29 19.0525588832576 - 30 19.0525588832576 - 31 19.0525588832576 - 32 19.0525588832576 - 33 19.0525588832576 - 34 19.0525588832576 - 35 19.0525588832576 - 36 19.0525588832576 - 37 19.0525588832576 - 38 19.0525588832576 - 39 19.0525588832576 - 40 19.0525588832576 - 0.5 19.9185842870421 - 1.5 19.9185842870421 - 2.5 19.9185842870421 - 3.5 19.9185842870421 - 4.5 19.9185842870421 - 5.5 19.9185842870421 - 6.5 19.9185842870421 - 7.5 19.9185842870421 - 8.5 19.9185842870421 - 9.5 19.9185842870421 - 10.5 19.9185842870421 - 11.5 19.9185842870421 - 12.5 19.9185842870421 - 13.5 19.9185842870421 - 14.5 19.9185842870421 - 15.5 19.9185842870421 - 16.5 19.9185842870421 - 17.5 19.9185842870421 - 18.5 19.9185842870421 - 19.5 19.9185842870421 - 20.5 19.9185842870421 - 21.5 19.9185842870421 - 22.5 19.9185842870421 - 23.5 19.9185842870421 - 24.5 19.9185842870421 - 25.5 19.9185842870421 - 26.5 19.9185842870421 - 27.5 19.9185842870421 - 28.5 19.9185842870421 - 29.5 19.9185842870421 - 30.5 19.9185842870421 - 31.5 19.9185842870421 - 32.5 19.9185842870421 - 33.5 19.9185842870421 - 34.5 19.9185842870421 - 35.5 19.9185842870421 - 36.5 19.9185842870421 - 37.5 
[Remaining rows of this deleted coordinate file are omitted here: "x y" generating-point pairs on what appears to be a hexagonal lattice, with x running from 0.5 to 40 and y increasing in steps of roughly 0.866 from about 19.9 to about 33.8; the final few rows continue below.]
33.7749907475931 - 26.5 33.7749907475931 - 27.5 33.7749907475931 - 28.5 33.7749907475931 - 29.5 33.7749907475931 - 30.5 33.7749907475931 - 31.5 33.7749907475931 - 32.5 33.7749907475931 - 33.5 33.7749907475931 - 34.5 33.7749907475931 - 35.5 33.7749907475931 - 36.5 33.7749907475931 - 37.5 33.7749907475931 - 38.5 33.7749907475931 - 39.5 33.7749907475931 - 1 34.6410161513775 - 2 34.6410161513775 - 3 34.6410161513775 - 4 34.6410161513775 - 5 34.6410161513775 - 6 34.6410161513775 - 7 34.6410161513775 - 8 34.6410161513775 - 9 34.6410161513775 - 10 34.6410161513775 - 11 34.6410161513775 - 12 34.6410161513775 - 13 34.6410161513775 - 14 34.6410161513775 - 15 34.6410161513775 - 16 34.6410161513775 - 17 34.6410161513775 - 18 34.6410161513775 - 19 34.6410161513775 - 20 34.6410161513775 - 21 34.6410161513775 - 22 34.6410161513775 - 23 34.6410161513775 - 24 34.6410161513775 - 25 34.6410161513775 - 26 34.6410161513775 - 27 34.6410161513775 - 28 34.6410161513775 - 29 34.6410161513775 - 30 34.6410161513775 - 31 34.6410161513775 - 32 34.6410161513775 - 33 34.6410161513775 - 34 34.6410161513775 - 35 34.6410161513775 - 36 34.6410161513775 - 37 34.6410161513775 - 38 34.6410161513775 - 39 34.6410161513775 - 40 34.6410161513775 diff --git a/grid_gen/periodic_general/fortune/Makefile b/grid_gen/periodic_general/fortune/Makefile deleted file mode 100644 index 31b2e0bee..000000000 --- a/grid_gen/periodic_general/fortune/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.SUFFIXES: .c .o - - -OBJS = edgelist.o geometry.o heap.o memory.o output.o voronoi.o voronoi_main.o - -all: $(OBJS) - ar -ru libfortune.a $(OBJS) - -clean: - rm -f *.o libfortune.a - -.c.o: - $(CC) $(CFLAGS) -c $< -I../ diff --git a/grid_gen/periodic_general/fortune/edgelist.c b/grid_gen/periodic_general/fortune/edgelist.c deleted file mode 100644 index 95373b988..000000000 --- a/grid_gen/periodic_general/fortune/edgelist.c +++ /dev/null @@ -1,188 +0,0 @@ - -/*** EDGELIST.C ***/ - -#include "vdefs.h" - -int ELhashsize ; -Site * bottomsite ; -Freelist hfl ; -Halfedge * ELleftend, * ELrightend, **ELhash ; - -int ntry, totalsearch ; - -void -ELinitialize(void) -{ - int i ; - - freeinit(&hfl, sizeof(Halfedge)) ; - ELhashsize = 2 * sqrt_nsites ; - ELhash = (Halfedge **)myalloc( sizeof(*ELhash) * ELhashsize) ; - for (i = 0 ; i < ELhashsize ; i++) - { - ELhash[i] = (Halfedge *)NULL ; - } - ELleftend = HEcreate((Edge *)NULL, 0) ; - ELrightend = HEcreate((Edge *)NULL, 0) ; - ELleftend->ELleft = (Halfedge *)NULL ; - ELleftend->ELright = ELrightend ; - ELrightend->ELleft = ELleftend ; - ELrightend->ELright = (Halfedge *)NULL ; - ELhash[0] = ELleftend ; - ELhash[ELhashsize-1] = ELrightend ; -} - -Halfedge * -HEcreate(Edge * e, int pm) -{ - Halfedge * answer ; - - answer = (Halfedge *)getfree(&hfl) ; - answer->ELedge = e ; - answer->ELpm = pm ; - answer->PQnext = (Halfedge *)NULL ; - answer->vertex = (Site *)NULL ; - answer->ELrefcnt = 0 ; - return (answer) ; -} - -void -ELinsert(Halfedge * lb, Halfedge * nnew) -{ - nnew->ELleft = lb ; - nnew->ELright = lb->ELright ; - (lb->ELright)->ELleft = nnew ; - lb->ELright = nnew ; -} - -/* Get entry from hash table, pruning any deleted nodes */ - -Halfedge * -ELgethash(int b) -{ - Halfedge * he ; - - if ((b < 0) || (b >= ELhashsize)) - { - return ((Halfedge *)NULL) ; - } - he = ELhash[b] ; - if ((he == (Halfedge *)NULL) || (he->ELedge != (Edge *)DELETED)) - { - return (he) ; - } - /* Hash table points to deleted half edge. Patch as necessary. 
*/ - ELhash[b] = (Halfedge *)NULL ; - if ((--(he->ELrefcnt)) == 0) - { - makefree((Freenode *)he, (Freelist *)&hfl) ; - } - return ((Halfedge *)NULL) ; -} - -Halfedge * -ELleftbnd(VPoint * p) -{ - int i, bucket ; - Halfedge * he ; - - /* Use hash table to get close to desired halfedge */ - bucket = (p->x - xmin) / deltax * ELhashsize ; - if (bucket < 0) - { - bucket = 0 ; - } - if (bucket >= ELhashsize) - { - bucket = ELhashsize - 1 ; - } - he = ELgethash(bucket) ; - if (he == (Halfedge *)NULL) - { - for (i = 1 ; 1 ; i++) - { - if ((he = ELgethash(bucket-i)) != (Halfedge *)NULL) - { - break ; - } - if ((he = ELgethash(bucket+i)) != (Halfedge *)NULL) - { - break ; - } - } - totalsearch += i ; - } - ntry++ ; - /* Now search linear list of halfedges for the corect one */ - if (he == ELleftend || (he != ELrightend && right_of(he,p))) - { - do { - he = he->ELright ; - } while (he != ELrightend && right_of(he,p)) ; - he = he->ELleft ; - } - else - { - do { - he = he->ELleft ; - } while (he != ELleftend && !right_of(he,p)) ; - } - /*** Update hash table and reference counts ***/ - if ((bucket > 0) && (bucket < ELhashsize-1)) - { - if (ELhash[bucket] != (Halfedge *)NULL) - { - (ELhash[bucket]->ELrefcnt)-- ; - } - ELhash[bucket] = he ; - (ELhash[bucket]->ELrefcnt)++ ; - } - return (he) ; -} - -/*** This delete routine can't reclaim node, since pointers from hash - : table may be present. - ***/ - -void -ELdelete(Halfedge * he) -{ - (he->ELleft)->ELright = he->ELright ; - (he->ELright)->ELleft = he->ELleft ; - he->ELedge = (Edge *)DELETED ; -} - -Halfedge * -ELright(Halfedge * he) -{ - return (he->ELright) ; -} - -Halfedge * -ELleft(Halfedge * he) -{ - return (he->ELleft) ; -} - -Site * -leftreg(Halfedge * he) -{ - if (he->ELedge == (Edge *)NULL) - { - return(bottomsite) ; - } - return (he->ELpm == le ? he->ELedge->reg[le] : - he->ELedge->reg[re]) ; -} - -Site * -rightreg(Halfedge * he) -{ - if (he->ELedge == (Edge *)NULL) - { - return(bottomsite) ; - } - return (he->ELpm == le ? he->ELedge->reg[re] : - he->ELedge->reg[le]) ; -} - diff --git a/grid_gen/periodic_general/fortune/geometry.c b/grid_gen/periodic_general/fortune/geometry.c deleted file mode 100644 index fb06e2923..000000000 --- a/grid_gen/periodic_general/fortune/geometry.c +++ /dev/null @@ -1,220 +0,0 @@ - -/*** GEOMETRY.C ***/ - -#include -#include "vdefs.h" - -double deltax, deltay ; -int nedges, sqrt_nsites, nvertices ; -Freelist efl ; - -void -geominit(void) -{ - freeinit(&efl, sizeof(Edge)) ; - nvertices = nedges = 0 ; - sqrt_nsites = sqrt((double)(nsites+4)) ; - deltay = ymax - ymin ; - deltax = xmax - xmin ; -} - -Edge * -bisect(Site * s1, Site * s2) -{ - double dx, dy, adx, ady ; - Edge * newedge ; - - newedge = (Edge *)getfree(&efl) ; - newedge->reg[0] = s1 ; - newedge->reg[1] = s2 ; - ref(s1) ; - ref(s2) ; - newedge->ep[0] = newedge->ep[1] = (Site *)NULL ; - dx = s2->coord.x - s1->coord.x ; - dy = s2->coord.y - s1->coord.y ; - adx = dx>0 ? dx : -dx ; - ady = dy>0 ? 
dy : -dy ; - newedge->c = s1->coord.x * dx + s1->coord.y * dy + (dx*dx + dy*dy) * 0.5 ; - if (adx > ady) - { - newedge->a = 1.0 ; - newedge->b = dy/dx ; - newedge->c /= dx ; - } - else - { - newedge->b = 1.0 ; - newedge->a = dx/dy ; - newedge->c /= dy ; - } - newedge->edgenbr = nedges ; - out_bisector(newedge) ; - nedges++ ; - return (newedge) ; -} - -Site * -intersect(Halfedge * el1, Halfedge * el2) -{ - Edge * e1, * e2, * e ; - Halfedge * el ; - double d, xint, yint ; - int right_of_site ; - Site * v ; - - e1 = el1->ELedge ; - e2 = el2->ELedge ; - if ((e1 == (Edge*)NULL) || (e2 == (Edge*)NULL)) - { - return ((Site *)NULL) ; - } - if (e1->reg[1] == e2->reg[1]) - { - return ((Site *)NULL) ; - } - d = (e1->a * e2->b) - (e1->b * e2->a) ; - if ((-1.0e-10 < d) && (d < 1.0e-10)) - { - return ((Site *)NULL) ; - } - xint = (e1->c * e2->b - e2->c * e1->b) / d ; - yint = (e2->c * e1->a - e1->c * e2->a) / d ; - if ((e1->reg[1]->coord.y < e2->reg[1]->coord.y) || - (e1->reg[1]->coord.y == e2->reg[1]->coord.y && - e1->reg[1]->coord.x < e2->reg[1]->coord.x)) - { - el = el1 ; - e = e1 ; - } - else - { - el = el2 ; - e = e2 ; - } - right_of_site = (xint >= e->reg[1]->coord.x) ; - if ((right_of_site && (el->ELpm == le)) || - (!right_of_site && (el->ELpm == re))) - { - return ((Site *)NULL) ; - } - v = (Site *)getfree(&sfl) ; - v->refcnt = 0 ; - v->coord.x = xint ; - v->coord.y = yint ; - return (v) ; -} - -/*** returns 1 if p is to right of halfedge e ***/ - -int -right_of(Halfedge * el, VPoint * p) -{ - Edge * e ; - Site * topsite ; - int right_of_site, above, fast ; - double dxp, dyp, dxs, t1, t2, t3, yl ; - - e = el->ELedge ; - topsite = e->reg[1] ; - right_of_site = (p->x > topsite->coord.x) ; - if (right_of_site && (el->ELpm == le)) - { - return (1) ; - } - if(!right_of_site && (el->ELpm == re)) - { - return (0) ; - } - if (e->a == 1.0) - { - dyp = p->y - topsite->coord.y ; - dxp = p->x - topsite->coord.x ; - fast = 0 ; - if ((!right_of_site & (e->b < 0.0)) || - (right_of_site & (e->b >= 0.0))) - { - fast = above = (dyp >= e->b*dxp) ; - } - else - { - above = ((p->x + p->y * e->b) > (e->c)) ; - if (e->b < 0.0) - { - above = !above ; - } - if (!above) - { - fast = 1 ; - } - } - if (!fast) - { - dxs = topsite->coord.x - (e->reg[0])->coord.x ; - above = (e->b * (dxp*dxp - dyp*dyp)) - < - (dxs * dyp * (1.0 + 2.0 * dxp / - dxs + e->b * e->b)) ; - if (e->b < 0.0) - { - above = !above ; - } - } - } - else /*** e->b == 1.0 ***/ - { - yl = e->c - e->a * p->x ; - t1 = p->y - yl ; - t2 = p->x - topsite->coord.x ; - t3 = yl - topsite->coord.y ; - above = ((t1*t1) > ((t2 * t2) + (t3 * t3))) ; - } - return (el->ELpm == le ? 
above : !above) ; -} - -void -endpoint(Edge * e, int lr, Site * s) -{ - e->ep[lr] = s ; - ref(s) ; - if (e->ep[re-lr] == (Site *)NULL) - { - return ; - } - out_ep(e) ; - deref(e->reg[le]) ; - deref(e->reg[re]) ; - makefree((Freenode *)e, (Freelist *) &efl) ; -} - -double -dist(Site * s, Site * t) -{ - double dx,dy ; - - dx = s->coord.x - t->coord.x ; - dy = s->coord.y - t->coord.y ; - return (sqrt(dx*dx + dy*dy)) ; -} - -void -makevertex(Site * v) -{ - v->sitenbr = nvertices++ ; - out_vertex(v) ; -} - -void -deref(Site * v) -{ - if (--(v->refcnt) == 0 ) - { - makefree((Freenode *)v, (Freelist *)&sfl) ; - } -} - -void -ref(Site * v) -{ - ++(v->refcnt) ; -} - diff --git a/grid_gen/periodic_general/fortune/heap.c b/grid_gen/periodic_general/fortune/heap.c deleted file mode 100644 index 87bfbbd14..000000000 --- a/grid_gen/periodic_general/fortune/heap.c +++ /dev/null @@ -1,118 +0,0 @@ - -/*** HEAP.C ***/ - - -#include "vdefs.h" - -int PQmin, PQcount, PQhashsize ; -Halfedge * PQhash ; - -void -PQinsert(Halfedge * he, Site * v, double offset) -{ - Halfedge * last, * next ; - - he->vertex = v ; - ref(v) ; - he->ystar = v->coord.y + offset ; - last = &PQhash[ PQbucket(he)] ; - while ((next = last->PQnext) != (Halfedge *)NULL && - (he->ystar > next->ystar || - (he->ystar == next->ystar && - v->coord.x > next->vertex->coord.x))) - { - last = next ; - } - he->PQnext = last->PQnext ; - last->PQnext = he ; - PQcount++ ; -} - -void -PQdelete(Halfedge * he) -{ - Halfedge * last; - - if(he -> vertex != (Site *) NULL) - { - last = &PQhash[PQbucket(he)] ; - while (last -> PQnext != he) - { - last = last->PQnext ; - } - last->PQnext = he->PQnext; - PQcount-- ; - deref(he->vertex) ; - he->vertex = (Site *)NULL ; - } -} - -int -PQbucket(Halfedge * he) -{ - int bucket ; - - - if (he->ystar < ymin) bucket = 0; - else if (he->ystar >= ymax) bucket = PQhashsize-1; - else bucket = (he->ystar - ymin)/deltay * PQhashsize; - if (bucket < 0) - { - bucket = 0 ; - } - if (bucket >= PQhashsize) - { - bucket = PQhashsize-1 ; - } - if (bucket < PQmin) - { - PQmin = bucket ; - } - return (bucket); -} - -int -PQempty(void) -{ - return (PQcount == 0) ; -} - - -VPoint -PQ_min(void) -{ - VPoint answer ; - - while (PQhash[PQmin].PQnext == (Halfedge *)NULL) - { - ++PQmin ; - } - answer.x = PQhash[PQmin].PQnext->vertex->coord.x ; - answer.y = PQhash[PQmin].PQnext->ystar ; - return (answer) ; -} - -Halfedge * -PQextractmin(void) -{ - Halfedge * curr ; - - curr = PQhash[PQmin].PQnext ; - PQhash[PQmin].PQnext = curr->PQnext ; - PQcount-- ; - return (curr) ; -} - -void -PQinitialize(void) -{ - int i ; - - PQcount = PQmin = 0 ; - PQhashsize = 4 * sqrt_nsites ; - PQhash = (Halfedge *)myalloc(PQhashsize * sizeof *PQhash) ; - for (i = 0 ; i < PQhashsize; i++) - { - PQhash[i].PQnext = (Halfedge *)NULL ; - } -} diff --git a/grid_gen/periodic_general/fortune/memory.c b/grid_gen/periodic_general/fortune/memory.c deleted file mode 100644 index 13a75d870..000000000 --- a/grid_gen/periodic_general/fortune/memory.c +++ /dev/null @@ -1,85 +0,0 @@ - -/*** MEMORY.C ***/ - -#include -#include /* malloc(), exit() */ - -#include "vdefs.h" - -extern int sqrt_nsites, siteidx ; -char** memory_map; -int nallocs = 0; - -void -freeinit(Freelist * fl, int size) -{ - fl->head = (Freenode *)NULL ; - fl->nodesize = size ; -} - -char * -getfree(Freelist * fl) -{ - int i ; - Freenode * t ; - if (fl->head == (Freenode *)NULL) - { - t = (Freenode *) myalloc(sqrt_nsites * fl->nodesize) ; - for(i = 0 ; i < sqrt_nsites ; i++) - { - makefree((Freenode *)((char 
*)t+i*fl->nodesize), fl) ; - } - } - t = fl->head ; - fl->head = (fl->head)->nextfree ; - return ((char *)t) ; -} - -void -makefree(Freenode * curr, Freelist * fl) -{ - curr->nextfree = fl->head ; - fl->head = curr ; -} - -int total_alloc; - -char * -myalloc(unsigned n) -{ - char * t ; - if ((t=(char*)malloc(n)) == (char *) 0) - { - fprintf(stderr,"Insufficient memory processing site %d (%d bytes in use)\n", - siteidx, total_alloc) ; - exit(0) ; - } - total_alloc += n ; - - if (nallocs % 20000 == 0) - { - if (nallocs == 0) - memory_map = (char **)malloc((nallocs+20000)*sizeof(char*)); - else - memory_map = (char **)realloc(memory_map,(nallocs+20000)*sizeof(char*)); - } - memory_map[nallocs++] = t; - return (t) ; -} - -void free_all(void) -{ - int i; - - for (i=0; i -#include "vdefs.h" - -extern int triangulate, plot, debug ; -extern double ymax, ymin, xmax, xmin ; - -double pxmin, pxmax, pymin, pymax, cradius; - -void -openpl(void) -{ -} - -#pragma argsused -void -line(double ax, double ay, double bx, double by) -{ -} - -#pragma argsused -void -circle(double ax, double ay, double radius) -{ -} - -#pragma argsused -void -range(double pxmin, double pxmax, double pymin, double pymax) -{ -} - -void -out_bisector(Edge * e) -{ - if (triangulate && plot && !debug) - { - line(e->reg[0]->coord.x, e->reg[0]->coord.y, - e->reg[1]->coord.x, e->reg[1]->coord.y) ; - } - if (!triangulate && !plot && !debug) - { - printf("l %f %f %f\n", e->a, e->b, e->c) ; - } - if (debug) - { - printf("line(%d) %gx+%gy=%g, bisecting %d %d\n", e->edgenbr, - e->a, e->b, e->c, e->reg[le]->sitenbr, e->reg[re]->sitenbr) ; - } -} - -void -out_ep(Edge * e) -{ - if (!triangulate && plot) - { - clip_line(e) ; - } - if (!triangulate && !plot) - { - printf("e %d", e->edgenbr); - printf(" %d ", e->ep[le] != (Site *)NULL ? e->ep[le]->sitenbr : -1) ; - printf("%d\n", e->ep[re] != (Site *)NULL ? e->ep[re]->sitenbr : -1) ; - } -} - -void -out_vertex(Site * v) -{ - if (!triangulate && !plot && !debug) - { - printf ("v %f %f\n", v->coord.x, v->coord.y) ; - } - if (debug) - { - printf("vertex(%d) at %f %f\n", v->sitenbr, v->coord.x, v->coord.y) ; - } -} - -void -out_site(Site * s) -{ - if (!triangulate && plot && !debug) - { - circle (s->coord.x, s->coord.y, cradius) ; - } - if (!triangulate && !plot && !debug) - { - printf("s %f %f\n", s->coord.x, s->coord.y) ; - } - if (debug) - { - printf("site (%d) at %f %f\n", s->sitenbr, s->coord.x, s->coord.y) ; - } -} - -void -out_triple(PointSet * p, Site * s1, Site * s2, Site * s3) -{ - Point a, b, c; - a.setX(s1->coord.x); - a.setY(s1->coord.y); - a.setNum(s1->sitenbr); - b.setX(s2->coord.x); - b.setY(s2->coord.y); - b.setNum(s2->sitenbr); - c.setX(s3->coord.x); - c.setY(s3->coord.y); - c.setNum(s3->sitenbr); - p->triangulation->push_back(Triangle(a, b, c)); -//cout << "in out_triple " << a << " " << b << endl; -//cout << "in out_triple " << b << " " << a << endl; -//cout << "in out_triple " << a << " " << c << endl; -//cout << "in out_triple " << c << " " << a << endl; -//cout << "in out_triple " << b << " " << c << endl; -//cout << "in out_triple " << c << " " << b << endl; -// printf("%d %d %d\n", s1->sitenbr, s2->sitenbr, s3->sitenbr) ; -} - -void -plotinit(void) -{ - double dx, dy, d ; - - dy = ymax - ymin ; - dx = xmax - xmin ; - d = ( dx > dy ? 
dx : dy) * 1.1 ; - pxmin = xmin - (d-dx) / 2.0 ; - pxmax = xmax + (d-dx) / 2.0 ; - pymin = ymin - (d-dy) / 2.0 ; - pymax = ymax + (d-dy) / 2.0 ; - cradius = (pxmax - pxmin) / 350.0 ; - openpl() ; - range(pxmin, pymin, pxmax, pymax) ; -} - -void -clip_line(Edge * e) -{ - Site * s1, * s2 ; - double x1, x2, y1, y2 ; - - if (e->a == 1.0 && e->b >= 0.0) - { - s1 = e->ep[1] ; - s2 = e->ep[0] ; - } - else - { - s1 = e->ep[0] ; - s2 = e->ep[1] ; - } - if (e->a == 1.0) - { - y1 = pymin ; - if (s1 != (Site *)NULL && s1->coord.y > pymin) - { - y1 = s1->coord.y ; - } - if (y1 > pymax) - { - return ; - } - x1 = e->c - e->b * y1 ; - y2 = pymax ; - if (s2 != (Site *)NULL && s2->coord.y < pymax) - { - y2 = s2->coord.y ; - } - if (y2 < pymin) - { - return ; - } - x2 = e->c - e->b * y2 ; - if (((x1 > pxmax) && (x2 > pxmax)) || ((x1 < pxmin) && (x2 < pxmin))) - { - return ; - } - if (x1 > pxmax) - { - x1 = pxmax ; - y1 = (e->c - x1) / e->b ; - } - if (x1 < pxmin) - { - x1 = pxmin ; - y1 = (e->c - x1) / e->b ; - } - if (x2 > pxmax) - { - x2 = pxmax ; - y2 = (e->c - x2) / e->b ; - } - if (x2 < pxmin) - { - x2 = pxmin ; - y2 = (e->c - x2) / e->b ; - } - } - else - { - x1 = pxmin ; - if (s1 != (Site *)NULL && s1->coord.x > pxmin) - { - x1 = s1->coord.x ; - } - if (x1 > pxmax) - { - return ; - } - y1 = e->c - e->a * x1 ; - x2 = pxmax ; - if (s2 != (Site *)NULL && s2->coord.x < pxmax) - { - x2 = s2->coord.x ; - } - if (x2 < pxmin) - { - return ; - } - y2 = e->c - e->a * x2 ; - if (((y1 > pymax) && (y2 > pymax)) || ((y1 < pymin) && (y2 pymax) - { - y1 = pymax ; - x1 = (e->c - y1) / e->a ; - } - if (y1 < pymin) - { - y1 = pymin ; - x1 = (e->c - y1) / e->a ; - } - if (y2 > pymax) - { - y2 = pymax ; - x2 = (e->c - y2) / e->a ; - } - if (y2 < pymin) - { - y2 = pymin ; - x2 = (e->c - y2) / e->a ; - } - } - line(x1,y1,x2,y2); -} - diff --git a/grid_gen/periodic_general/fortune/vdefs.h b/grid_gen/periodic_general/fortune/vdefs.h deleted file mode 100644 index 8a3590838..000000000 --- a/grid_gen/periodic_general/fortune/vdefs.h +++ /dev/null @@ -1,141 +0,0 @@ -#ifndef __VDEFS_H -#define __VDEFS_H - -#include "PointSet.h" - -#ifndef NULL -#define NULL 0 -#endif - -#define DELETED -2 - -/* -typedef struct tagFreenode - { - struct tagFreenode * nextfree; - } Freenode ; - - -typedef struct tagFreelist - { - Freenode * head; - int nodesize; - } Freelist ; - -typedef struct tagPoint - { - double x ; - double y ; - } VPoint ; - -// structure used both for sites and for vertices // - -typedef struct tagSite - { - VPoint coord ; - int sitenbr ; - int refcnt ; - } Site ; - - -typedef struct tagEdge - { - double a, b, c ; - Site * ep[2] ; - Site * reg[2] ; - int edgenbr ; - } Edge ; -*/ - -#define le 0 -#define re 1 - -typedef struct tagHalfedge - { - struct tagHalfedge * ELleft ; - struct tagHalfedge * ELright ; - Edge * ELedge ; - int ELrefcnt ; - char ELpm ; - Site * vertex ; - double ystar ; - struct tagHalfedge * PQnext ; - } Halfedge ; - -/* edgelist.c */ -void ELinitialize(void) ; -Halfedge * HEcreate(Edge *, int) ; -void ELinsert(Halfedge *, Halfedge *) ; -Halfedge * ELgethash(int) ; -Halfedge * ELleftbnd(VPoint *) ; -void ELdelete(Halfedge *) ; -Halfedge * ELright(Halfedge *) ; -Halfedge * ELleft(Halfedge *) ; -Site * leftreg(Halfedge *) ; -Site * rightreg(Halfedge *) ; -extern int ELhashsize ; -extern Site * bottomsite ; -extern Freelist hfl ; -extern Halfedge * ELleftend, * ELrightend, **ELhash ; - -/* geometry.c */ -void geominit(void) ; -Edge * bisect(Site *, Site *) ; -Site * intersect(Halfedge *, Halfedge *) ; -int 
right_of(Halfedge *, VPoint *) ; -void endpoint(Edge *, int, Site *) ; -double dist(Site *, Site *) ; -void makevertex(Site *) ; -void deref(Site *) ; -void ref(Site *) ; -extern double deltax, deltay ; -extern int nsites, nedges, sqrt_nsites, nvertices ; -extern Freelist sfl, efl ; - -/* heap.c */ -void PQinsert(Halfedge *, Site *, double) ; -void PQdelete(Halfedge *) ; -int PQbucket(Halfedge *) ; -int PQempty(void) ; -VPoint PQ_min(void) ; -Halfedge * PQextractmin(void) ; -void PQinitialize(void) ; -extern int PQmin, PQcount, PQhashsize ; -extern Halfedge * PQhash ; - -/* main.c */ -extern int sorted, triangulate, plot, debug, nsites, siteidx ; -extern double xmin, xmax, ymin, ymax ; -extern Site * sites ; -extern Freelist sfl ; -int voronoi_main(int, char **); - -/* getopt.c */ -extern int getopt(int, char *const *, const char *); - -/* memory.c */ -void freeinit(Freelist *, int) ; -char *getfree(Freelist *) ; -void makefree(Freenode *, Freelist *) ; -char *myalloc(unsigned) ; -void free_all(void); - -/* output.c */ -void openpl(void) ; -void line(double, double, double, double) ; -void circle(double, double, double) ; -void range(double, double, double, double) ; -void out_bisector(Edge *) ; -void out_ep(Edge *) ; -void out_vertex(Site *) ; -void out_site(Site *) ; -void out_triple(PointSet *, Site *, Site *, Site *) ; -void plotinit(void) ; -void clip_line(Edge *) ; - -/* voronoi.c */ -void voronoi(PointSet * p, Site *(*)()) ; - -#endif - - diff --git a/grid_gen/periodic_general/fortune/voronoi.c b/grid_gen/periodic_general/fortune/voronoi.c deleted file mode 100644 index ec5319f7d..000000000 --- a/grid_gen/periodic_general/fortune/voronoi.c +++ /dev/null @@ -1,121 +0,0 @@ - -/*** VORONOI.C ***/ - -#include "vdefs.h" - -extern Site * bottomsite ; -extern Halfedge * ELleftend, * ELrightend ; - -/*** implicit parameters: nsites, sqrt_nsites, xmin, xmax, ymin, ymax, - : deltax, deltay (can all be estimates). - : Performance suffers if they are wrong; better to make nsites, - : deltax, and deltay too big than too small. (?) 
- ***/ - -void -voronoi(PointSet * ptset, Site *(*nextsite)(void)) -{ - Site * newsite, * bot, * top, * temp, * p, * v ; - VPoint newintstar ; - int pm ; - Halfedge * lbnd, * rbnd, * llbnd, * rrbnd, * bisector ; - Edge * e ; - - PQinitialize() ; - bottomsite = (*nextsite)() ; - out_site(bottomsite) ; - ELinitialize() ; - newsite = (*nextsite)() ; - while (1) - { - if(!PQempty()) - { - newintstar = PQ_min() ; - } - if (newsite != (Site *)NULL && (PQempty() - || newsite -> coord.y < newintstar.y - || (newsite->coord.y == newintstar.y - && newsite->coord.x < newintstar.x))) {/* new site is -smallest */ - { - out_site(newsite) ; - } - lbnd = ELleftbnd(&(newsite->coord)) ; - rbnd = ELright(lbnd) ; - bot = rightreg(lbnd) ; - e = bisect(bot, newsite) ; - bisector = HEcreate(e, le) ; - ELinsert(lbnd, bisector) ; - p = intersect(lbnd, bisector) ; - if (p != (Site *)NULL) - { - PQdelete(lbnd) ; - PQinsert(lbnd, p, dist(p,newsite)) ; - } - lbnd = bisector ; - bisector = HEcreate(e, re) ; - ELinsert(lbnd, bisector) ; - p = intersect(bisector, rbnd) ; - if (p != (Site *)NULL) - { - PQinsert(bisector, p, dist(p,newsite)) ; - } - newsite = (*nextsite)() ; - } - else if (!PQempty()) /* intersection is smallest */ - { - lbnd = PQextractmin() ; - llbnd = ELleft(lbnd) ; - rbnd = ELright(lbnd) ; - rrbnd = ELright(rbnd) ; - bot = leftreg(lbnd) ; - top = rightreg(rbnd) ; - out_triple(ptset, bot, top, rightreg(lbnd)) ; - v = lbnd->vertex ; - makevertex(v) ; - endpoint(lbnd->ELedge, lbnd->ELpm, v); - endpoint(rbnd->ELedge, rbnd->ELpm, v) ; - ELdelete(lbnd) ; - PQdelete(rbnd) ; - ELdelete(rbnd) ; - pm = le ; - if (bot->coord.y > top->coord.y) - { - temp = bot ; - bot = top ; - top = temp ; - pm = re ; - } - e = bisect(bot, top) ; - bisector = HEcreate(e, pm) ; - ELinsert(llbnd, bisector) ; - endpoint(e, re-pm, v) ; - deref(v) ; - p = intersect(llbnd, bisector) ; - if (p != (Site *) NULL) - { - PQdelete(llbnd) ; - PQinsert(llbnd, p, dist(p,bot)) ; - } - p = intersect(bisector, rrbnd) ; - if (p != (Site *) NULL) - { - PQinsert(bisector, p, dist(p,bot)) ; - } - } - else - { - break ; - } - } - - for( lbnd = ELright(ELleftend) ; - lbnd != ELrightend ; - lbnd = ELright(lbnd)) - { - e = lbnd->ELedge ; - out_ep(e) ; - } - -} - diff --git a/grid_gen/periodic_general/fortune/voronoi_main.c b/grid_gen/periodic_general/fortune/voronoi_main.c deleted file mode 100644 index 04662acf8..000000000 --- a/grid_gen/periodic_general/fortune/voronoi_main.c +++ /dev/null @@ -1,135 +0,0 @@ -/*** MAIN.C ***/ - -#include -#include /* realloc(), qsort() */ -#include "vdefs.h" -using namespace std; - -Site * readone(void), * nextone(void) ; -void readsites(PointSet * p) ; - -int sorted, triangulate, plot, debug, nsites, siteidx ; -double xmin, xmax, ymin, ymax ; -Site * sites ; -Freelist sfl ; - -void -voronoi_main(PointSet * p) -{ - int c ; - Site *(*next)() ; - - sorted = plot = debug = 0 ; - triangulate = 1 ; - - freeinit(&sfl, sizeof(Site)) ; - readsites(p) ; - next = nextone ; - siteidx = 0 ; - geominit() ; - voronoi(p, next) ; - free_all(); -} - -/*** sort sites on y, then x, coord ***/ - -int -scomp(const void * vs1, const void * vs2) -{ - VPoint * s1 = (VPoint *)vs1 ; - VPoint * s2 = (VPoint *)vs2 ; - - if (s1->y < s2->y) - { - return (-1) ; - } - if (s1->y > s2->y) - { - return (1) ; - } - if (s1->x < s2->x) - { - return (-1) ; - } - if (s1->x > s2->x) - { - return (1) ; - } - return (0) ; -} - -/*** return a single in-storage site ***/ - -Site * -nextone(void) -{ - Site * s ; - - if (siteidx < nsites) - { - s = &sites[siteidx++]; 
- return (s) ; - } - else - { - return ((Site *)NULL) ; - } -} - -/*** read all sites, sort, and compute xmin, xmax, ymin, ymax ***/ - -void -readsites(PointSet * p) -{ - int i ; - int j ; - - int MaxSize = 2000000; - - nsites = 0 ; - sites = (Site *) myalloc(MaxSize * sizeof(Site)); - for(j=0; jnPoints; j++) { - sites[nsites].coord.x = p->points[j]->getX(); - sites[nsites].coord.y = p->points[j]->getY(); - sites[nsites].sitenbr = p->points[j]->getNum() ; - sites[nsites++].refcnt = 0 ; - if (nsites % MaxSize == 0) { - sites = (Site *)realloc(sites,(nsites+MaxSize)*sizeof(Site)); - } - } - - qsort((void *)sites, nsites, sizeof(Site), scomp) ; - xmin = sites[0].coord.x ; - xmax = sites[0].coord.x ; - for (i = 1 ; i < nsites ; ++i) - { - if(sites[i].coord.x < xmin) - { - xmin = sites[i].coord.x ; - } - if (sites[i].coord.x > xmax) - { - xmax = sites[i].coord.x ; - } - } - ymin = sites[0].coord.y ; - ymax = sites[nsites-1].coord.y ; -} - -/*** read one site ***/ - -Site * -readone(void) -{ - Site * s ; - - s = (Site *)getfree(&sfl) ; - s->refcnt = 0 ; - s->sitenbr = siteidx++ ; - if (scanf("%lf %lf", &(s->coord.x), &(s->coord.y)) == EOF) - { - return ((Site *)NULL ) ; - } - return (s) ; -} - diff --git a/grid_gen/periodic_general/main.cxx b/grid_gen/periodic_general/main.cxx deleted file mode 100644 index 6e986fcca..000000000 --- a/grid_gen/periodic_general/main.cxx +++ /dev/null @@ -1,586 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "PointSet.h" -#include "Triangle.h" -#include "DensityFunction.h" -#include "netcdf.h" -#include -using namespace std; - -#define ALLOC_INT2D(ARR,I,J) (ARR) = new int*[(I)]; for(int i=0; i<(I); i++) (ARR)[i] = new int[(J)]; -#define DEALLOC_INT2D(ARR,I,J) for(int i=0; i<(I); i++) delete [] (ARR)[i]; delete [] (ARR); - -#define ALLOC_REAL2D(ARR,I,J) (ARR) = new double*[(I)]; for(int i=0; i<(I); i++) (ARR)[i] = new double[(J)]; -#define DEALLOC_REAL2D(ARR,I,J) for(int i=0; i<(I); i++) delete [] (ARR)[i]; delete [] (ARR); - -int obtuse_triangle(Triangle &t); - -void write_netcdf(int nCells, int nVertices, int vertexDegree, - double *xCell, double *yCell, double *zCell, - double *xVertex, double *yVertex, double *zVertex, - double *meshDensity, int *cellsOnVertex, - double x_period, double y_period); - -void readParamsFile(); - -Point segment_intersect(Point& p0, Point &p1, Point &q0, Point&q1); - - -// run-time parameters - double EPS = 1.0e-7; - double X_PERIOD = 1.0; - double Y_PERIOD = 1.0; - double X_BUFFER_FRAC = 0.05; - double Y_BUFFER_FRAC = 0.05; - double X_BUFFER_W = 1.0; // Buffer width in length units - these get calculated automatically from X/Y_BUFFER_FRAC - double Y_BUFFER_W = 1.0; - int NUMPOINTS = 200; - int MAXITR = 100; - int USE_MC = 1; // 1=true, 0=read from file - int USE_DATA_DENSITY = 0; // 1=true, 0=analytic density function in DensityFunction.cxx - - - -int main(int argc, char ** argv) -{ - -// read user-specified settings - readParamsFile(); - - int i, ii, jj, n, iter, idx, npts, np; - DensityFunction f(X_PERIOD, Y_PERIOD, USE_DATA_DENSITY); - PointSet pset; - PointSet out_pset; - vector * vcs; - Point * cells; - Point * temp_p; - Point * temp_pp; - Point p3; - Triangle t; - Point p, p2; - vector * clist; - vector * triangulation; - vector::iterator it; - set delaunay_tri; - set::iterator dti; - list norm_dt; - list::iterator norm_dti; - vector< vector > vertices_on_cell; - vector< vector > cells_on_cell; - vector< set > coc; - set::iterator cell_iter; - vector< vector > 
cv_on_cell; - Triangle * tri; - vector * vlist; - vector * elist; - double xcell, ycell; - double x, y; - double total_mass, mass; - FILE * restart; - int nCells, nVertices, vertexDegree; - double *xCell, *yCell, *zCell, *xVertex, *yVertex, *zVertex, *meshDensity; - int *cellsOnVertex; - - - - - - - if (USE_MC == 1) { - cout << "Generating Monte Carlo points..." <getX() < (double)( X_BUFFER_W ) || pset[i]->getX() > (double)( X_PERIOD - X_BUFFER_W )) - pset[i]->setBoundaryPoint(1); - if (pset[i]->getY() < (double)( Y_BUFFER_W ) || pset[i]->getY() > (double)( Y_PERIOD - Y_BUFFER_W )) - pset[i]->setBoundaryPoint(1); - } - - - /* - * Lloyd iteration - */ - for (iter=0; iterisBoundaryPoint()) { - total_mass = 0.0; - p.setXY(0.0, 0.0); - for (int j=0; jsetXY(p.getX(), p.getY()); - - /* If point has drifted into boundary region, push it back... */ - pset[i]->setX(pset[i]->getX() < (double)( X_BUFFER_W ) ? (double)( X_BUFFER_W ) : pset[i]->getX()); - pset[i]->setX(pset[i]->getX() > (double)( X_PERIOD - X_BUFFER_W ) ? (double)( X_PERIOD - X_BUFFER_W ) : pset[i]->getX()); - pset[i]->setY(pset[i]->getY() < (double)( Y_BUFFER_W ) ? (double)( Y_BUFFER_W ) : pset[i]->getY()); - pset[i]->setY(pset[i]->getY() > (double)( Y_PERIOD - Y_BUFFER_W ) ? (double)( Y_PERIOD - Y_BUFFER_W ) : pset[i]->getY()); - } - } - delete [] vcs; - if (iter % 20 == 0) { - // Write restart file every 20 iterations (could become a runtime configurable setting) - cout << "Writing restart.txt..." << endl; - restart = fopen("restart.txt","w"); - for(i=0; igetX(), pset[i]->getY()); - } - fclose(restart); - } - - } - - // Write restart again at the end - cout << "Writing restart.txt..." << endl; - - restart = fopen("restart.txt","w"); - for(i=0; igetX(), pset[i]->getY()); - } - fclose(restart); - - /* - * To get a triangulation of the points, we'll need to make copies of the boundary points - */ - cout << "Creating triangulation..." 
<< endl; - npts = pset.size(); - for (i=0; igetX(), pset[i]->getY(), 0); - temp_p->setNum(pset[i]->getNum()); - if (pset[i]->isBoundaryPoint()) - temp_p->setBoundaryPoint(1); - out_pset.addPoint(*temp_p); - - /* If this is a boundary point, add it again in a periodic way */ - if (temp_p->isBoundaryPoint()) { - - if (temp_p->getX() < (double)( X_BUFFER_W )) { - - /* RIGHT SIDE */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() + (double)( X_PERIOD )); - out_pset.addPoint(*temp_pp); - - if (temp_p->getY() < (double)( Y_BUFFER_W )) { - - /* UPPER-RIGHT CORNER */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() + (double)( X_PERIOD )); - temp_pp->setY(temp_pp->getY() + (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - else if (temp_p->getY() > (double)( Y_PERIOD - Y_BUFFER_W )) { - - /* LOWER-RIGHT CORNER */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() + (double)( X_PERIOD )); - temp_pp->setY(temp_pp->getY() - (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - } - else if (temp_p->getX() > (double)( X_PERIOD - X_BUFFER_W )) { - - /* LEFT SIDE */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() - (double)( X_PERIOD )); - out_pset.addPoint(*temp_pp); - - if (temp_p->getY() < (double)( Y_BUFFER_W )) { - - /* UPPER-LEFT CORNER */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() - (double)( X_PERIOD )); - temp_pp->setY(temp_pp->getY() + (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - else if (temp_p->getY() > (double)( Y_PERIOD - Y_BUFFER_W )) { - - /* LOWER-LEFT CORNER */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setX(temp_pp->getX() - (double)( X_PERIOD )); - temp_pp->setY(temp_pp->getY() - (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - } - - if (temp_p->getY() < (double)( Y_BUFFER_W )) { - - /* TOP SIDE */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setY(temp_pp->getY() + (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - else if (temp_p->getY() > (double)( Y_PERIOD - Y_BUFFER_W )) { - - /* BOTTOM SIDE */ - temp_pp = new Point(temp_p->getX(), temp_p->getY(), 0); - temp_pp->setNum(-1 * (temp_p->getNum() + 1)); /* Bdy points have negative indices */ - temp_pp->setY(temp_pp->getY() - (double)( Y_PERIOD )); - out_pset.addPoint(*temp_pp); - } - - } - - } - - - - /* - * Having obtained a triangulation of "real" generating points as well as "ghost" points, - * we need to scan through the triangles and keep a unique set that triangulates a truly - * doubly-periodic grid - */ - cout << "Finding unique set of triangles..." 
<< endl; - triangulation = out_pset.getTriangulation(); - for (it = triangulation->begin(); it != triangulation->end(); it++) { - /* - * Ghost/halo points have a negative index; if all of the vertices of a triangle - * are negative, the triangle is redundant - */ - ii = 0; - for (int j=0; j<3; j++) - if ( it->getVertex(j).getNum() >= 0 ) - ii++; - - /* - * If at least one corner of the triangle is non-negative, we consider keeping it, - * but only if it isn't redundant with another triangle already added to the set - */ - if ( ii > 0 ) { - tri = new Triangle(); - - for (int j=0; j<3; j++) { - temp_p = new Point(it->getVertex(j).getX(), it->getVertex(j).getY(), 0); - temp_p->setNum(it->getVertex(j).getNum()); - - /* Set point number back to positive value */ - if (temp_p->getNum() < 0) - temp_p->setNum(-1 * (temp_p->getNum() + 1)); - tri->setVertex(j, *temp_p); - } - - dti = delaunay_tri.find(*tri); - if (dti == delaunay_tri.end()) - delaunay_tri.insert(*tri); - else - delete tri; - } - } - - cout << "Ensuring corner locations are in range of domain..." << endl; - - /* - * Scan through triangles and ensure that corner locations are in the range (0,X_PERIOD],(0,Y_PERIOD] - */ - for (dti = delaunay_tri.begin(); dti != delaunay_tri.end(); dti++) { - t = *dti; - t.normalizeVertices((double)( EPS ), (double)( X_PERIOD + EPS ), (double)( EPS ), (double)( Y_PERIOD + EPS )); - norm_dt.push_back(t); - } - - - delete triangulation; - - - /* - * Generate {x,y,z}{Cell,Vertex}, meshDensity, and cellsOnVertex fields into simple arrays - */ - cout << "Generating {x,y,z}{Cell,Vertex}, meshDensity, and cellsOnVertex fields into simple arrays..." << endl; - nCells = pset.size(); - nVertices = norm_dt.size(); - vertexDegree = 3; - cout << "nCells = " << nCells << endl; - cout << "nVertices = " << nVertices << endl; - - xCell = (double *)malloc(sizeof(double) * (size_t)nCells); - yCell = (double *)malloc(sizeof(double) * (size_t)nCells); - zCell = (double *)malloc(sizeof(double) * (size_t)nCells); - - xVertex = (double *)malloc(sizeof(double) * (size_t)nVertices); - yVertex = (double *)malloc(sizeof(double) * (size_t)nVertices); - zVertex = (double *)malloc(sizeof(double) * (size_t)nVertices); - - meshDensity = (double *)malloc(sizeof(double) * (size_t)nCells); - - cellsOnVertex = (int *)malloc(sizeof(int) * (size_t)nVertices * (size_t)vertexDegree); - - npts = pset.size(); - for (i=0; igetX(); - yCell[i] = pset[i]->getY(); - zCell[i] = 0.0; - meshDensity[i] = f.evaluate(*pset[i]); - } - - i = 0; - ii = 0; - for (norm_dti = norm_dt.begin(); norm_dti != norm_dt.end(); norm_dti++) { - p = norm_dti->circumcenter(); - xVertex[i] = p.getX(); - yVertex[i] = p.getY(); - zVertex[i] = 0.0; - for (int j=0; j<3; j++) - cellsOnVertex[ii++] = norm_dti->getVertex(j).getNum() + 1; /* indices are 1-based in MPAS */ -// cellsOnVertex[ii++] = norm_dti->getVertex(j).getNum(); /* Do not use the 0-based indices when making meshes for MPAS */ - i++; - } - - - /* - * Write fields to NetCDF file - */ - cout << "Writing to netCDF file..." << endl; - write_netcdf(nCells, nVertices, vertexDegree, xCell, yCell, zCell, xVertex, yVertex, zVertex, meshDensity, cellsOnVertex, (double)( X_PERIOD ), (double)( Y_PERIOD )); - - - free(xCell); - free(yCell); - free(zCell); - free(xVertex); - free(yVertex); - free(zVertex); - free(meshDensity); - free(cellsOnVertex); - - - cout << "Successful completion." 
<< endl; - return 0; -} - - -int obtuse_triangle(Triangle &t) -{ - int i; - Point p[3]; - double PI = 2.0 * acos(0.0); - - p[0] = t.getVertex(0); - p[1] = t.getVertex(1); - p[2] = t.getVertex(2); - - for(i=0; i<3; i++) { - if (fabs(angle(p[i], p[(i+1)%3], p[(i+2)%3])) > PI/2.0) { -cout << p[i] << " " << p[(i+1)%3] << " " << p[(i+2)%3] << endl; - return i+1; - } - } - - return 0; -} - - -Point segment_intersect(Point& p0, Point &p1, Point &q0, Point&q1) -{ - Point retval; - - Point u = (p1 - p0); - Point v = (q1 - q0); - Point w = (p0 - q0); - - double s; - - s = (v.getY()*w.getX() - v.getX()*w.getY())/(v.getX()*u.getY() - v.getY()*u.getX()); - - retval = p0 + u*s; - - return retval; -} - - -void write_netcdf(int nCells, int nVertices, int vertexDegree, - double * xCell, double * yCell, double * zCell, - double * xVertex, double * yVertex, double * zVertex, - double * meshDensity, int * cellsOnVertex, - double x_period, double y_period - ) -{ - int i, j, k; - int ncerr; - int ncid; - int dimIDnCells, dimIDnVertices, dimIDvertexDegree; - int varIDxCell, varIDyCell, varIDzCell; - int varIDxVertex, varIDyVertex, varIDzVertex; - int varIDcellsOnVertex, varIDmeshDensity; - - int dimids1[1]; - int dimids2[2]; - int dimids3[3]; - size_t start1[1], count1[1]; - size_t start2[2], count2[2]; - size_t start3[3], count3[3]; - - double sphere_radius = 0.0; - - - ncerr = nc_create("grid.nc", NC_SHARE, &ncid); - - ncerr = nc_def_dim(ncid, "nCells", (size_t)nCells, &dimIDnCells); - ncerr = nc_def_dim(ncid, "nVertices", (size_t)nVertices, &dimIDnVertices); - ncerr = nc_def_dim(ncid, "vertexDegree", (size_t)vertexDegree, &dimIDvertexDegree); - - dimids1[0] = dimIDnCells; - ncerr = nc_def_var(ncid, "xCell", NC_DOUBLE, 1, dimids1, &varIDxCell); - ncerr = nc_def_var(ncid, "yCell", NC_DOUBLE, 1, dimids1, &varIDyCell); - ncerr = nc_def_var(ncid, "zCell", NC_DOUBLE, 1, dimids1, &varIDzCell); - ncerr = nc_def_var(ncid, "meshDensity", NC_DOUBLE, 1, dimids1, &varIDmeshDensity); - dimids1[0] = dimIDnVertices; - ncerr = nc_def_var(ncid, "xVertex", NC_DOUBLE, 1, dimids1, &varIDxVertex); - ncerr = nc_def_var(ncid, "yVertex", NC_DOUBLE, 1, dimids1, &varIDyVertex); - ncerr = nc_def_var(ncid, "zVertex", NC_DOUBLE, 1, dimids1, &varIDzVertex); - dimids2[0] = dimIDnVertices; - dimids2[1] = dimIDvertexDegree; - ncerr = nc_def_var(ncid, "cellsOnVertex", NC_INT, 2, dimids2, &varIDcellsOnVertex); - - ncerr = nc_put_att_text(ncid, NC_GLOBAL, "on_a_sphere", 16, "NO "); - ncerr = nc_put_att_text(ncid, NC_GLOBAL, "is_periodic", 16, "YES "); - ncerr = nc_put_att_double(ncid, NC_GLOBAL, "sphere_radius", NC_DOUBLE, 1, &sphere_radius); - ncerr = nc_put_att_double(ncid, NC_GLOBAL, "x_offset", NC_DOUBLE, 1, &x_period); - ncerr = nc_put_att_double(ncid, NC_GLOBAL, "y_offset", NC_DOUBLE, 1, &y_period); - - ncerr = nc_enddef(ncid); - - start1[0] = 0; - start2[0] = 0; - start2[1] = 0; - count1[0] = nCells; - ncerr = nc_put_vara_double(ncid, varIDxCell, start1, count1, xCell); - ncerr = nc_put_vara_double(ncid, varIDyCell, start1, count1, yCell); - ncerr = nc_put_vara_double(ncid, varIDzCell, start1, count1, zCell); - ncerr = nc_put_vara_double(ncid, varIDmeshDensity, start1, count1, meshDensity); - count1[0] = nVertices; - ncerr = nc_put_vara_double(ncid, varIDxVertex, start1, count1, xVertex); - ncerr = nc_put_vara_double(ncid, varIDyVertex, start1, count1, yVertex); - ncerr = nc_put_vara_double(ncid, varIDzVertex, start1, count1, zVertex); - count2[0] = nVertices; - count2[1] = vertexDegree; - ncerr = nc_put_vara_int(ncid, varIDcellsOnVertex, 
start2, count2, cellsOnVertex); - - ncerr = nc_close(ncid); -} - - - -/* ***** Setup Routines ***** */ -void readParamsFile(){ - //Read in parameters from Params. - //If Params doesn't exist, write out Params with a default set of parameters - string junk; - ifstream params("Params.txt"); - int temp_restart_mode; - int temp_fileio_mode; - - if(!params){ - cout << "Error opening Params.txt file." << endl; - cout << "Writing a default Params.txt file." << endl; - cout << "Exiting, please set up Params.txt, and rerun." << endl; - ofstream pout("Params.txt"); - pout << "Convergence tolerance to use:" << endl; - pout << EPS << endl; - pout << "Maximum number of iterations to perform:" << endl; - pout << MAXITR << endl; - pout << "How to get initial pointset. 0=from file; 1=Monte Carlo points from density function" << endl; - pout << USE_MC << endl; - pout << "If using Monte Carlo points, how many do you want?" << endl; - pout << NUMPOINTS << endl; - pout << "Domain width (x)" << endl; - pout << X_PERIOD << endl; - pout << "Domain height (y)" << endl; - pout << Y_PERIOD << endl; - pout << "Fraction of domain to set as a buffer in which initial point locations remain fixed, x-direction" << endl; - pout << X_BUFFER_FRAC << endl; - pout << "Fraction of domain to set as a buffer in which initial point locations remain fixed, y-direction" << endl; - pout << Y_BUFFER_FRAC << endl; - pout << "Use data density in file named density.nc with variables x, y, density. 1=true, 0=analytic density function in DensityFunction.cxx" << endl; - pout << USE_DATA_DENSITY << endl; - - pout.close(); - - exit(1); - } - - - getline(params,junk); - params >> EPS; - params.ignore(10000,'\n'); - getline(params,junk); - params >> MAXITR; - params.ignore(10000,'\n'); - getline(params,junk); - params >> USE_MC; - params.ignore(10000,'\n'); - getline(params,junk); - params >> NUMPOINTS; - params.ignore(10000,'\n'); - getline(params,junk); - params >> X_PERIOD; - params.ignore(10000,'\n'); - getline(params,junk); - params >> Y_PERIOD; - params.ignore(10000,'\n'); - getline(params,junk); - params >> X_BUFFER_FRAC; - params.ignore(10000,'\n'); - getline(params,junk); - params >> Y_BUFFER_FRAC; - params.ignore(10000,'\n'); - getline(params,junk); - params >> USE_DATA_DENSITY; - params.ignore(10000,'\n'); - - params.close(); - - cout << "=== Specified settings are: ===" << endl; - cout << "Convergence tolerance to use:" << endl; - cout << EPS << endl; - cout << "Maximum number of iterations to perform:" << endl; - cout << MAXITR << endl; - cout << "How to get initial pointset. 0=from file; 1=Monte Carlo points from density function" << endl; - cout << USE_MC << endl; - cout << "If using Monte Carlo points, how many do you want?" << endl; - cout << NUMPOINTS << endl; - cout << "Domain width (x)" << endl; - cout << X_PERIOD << endl; - cout << "Domain height (y)" << endl; - cout << Y_PERIOD << endl; - cout << "Fraction of domain to set as a buffer in which initial point locations remain fixed, x-direction" << endl; - cout << X_BUFFER_FRAC << endl; - cout << "Fraction of domain to set as a buffer in which initial point locations remain fixed, y-direction" << endl; - cout << Y_BUFFER_FRAC << endl; - cout << "Use data density in file named density.nc with variables x, y, density. 
1=true, 0=analytic density function in DensityFunction.cxx" << endl; - cout << USE_DATA_DENSITY << endl; - - X_BUFFER_W = X_PERIOD * X_BUFFER_FRAC; - Y_BUFFER_W = Y_PERIOD * Y_BUFFER_FRAC; -} diff --git a/grid_gen/periodic_general/mkgrid.cxx b/grid_gen/periodic_general/mkgrid.cxx deleted file mode 100644 index eb469b05c..000000000 --- a/grid_gen/periodic_general/mkgrid.cxx +++ /dev/null @@ -1,635 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include "PointSet.h" -#include "Triangle.h" -#include "DensityFunction.h" -#include "netcdf.h" -using namespace std; - -#define EPS 1.0e-7 - -#define ALLOC_INT2D(ARR,I,J) (ARR) = new int*[(I)]; for(int i=0; i<(I); i++) (ARR)[i] = new int[(J)]; -#define DEALLOC_INT2D(ARR,I,J) for(int i=0; i<(I); i++) delete [] (ARR)[i]; delete [] (ARR); - -#define ALLOC_REAL2D(ARR,I,J) (ARR) = new double*[(I)]; for(int i=0; i<(I); i++) (ARR)[i] = new double[(J)]; -#define DEALLOC_REAL2D(ARR,I,J) for(int i=0; i<(I); i++) delete [] (ARR)[i]; delete [] (ARR); - -void read_netcdf(int *nCells, int *nVertices, int *vertexDegree, - double **xCell, double **yCell, double **zCell, - double **xVertex, double **yVertex, double **zVertex, - double **meshDensity, int **cellsOnVertex, - double *x_period, double *y_period); - -void write_netcdf(int nCells, int nEdges, int nVertices, int maxEdges, int vertexDegree, - int * indexToCellID, int * indexToEdgeID, int * indexToVertexID, - double * xCell, double * yCell, double * zCell, double * latCell, double * lonCell, - double * xEdge, double * yEdge, double * zEdge, double * latEdge, double * lonEdge, - double * xVertex, double * yVertex, double * zVertex, double * latVertex, double * lonVertex, - int * nEdgesOnCell, int * nEdgesOnEdge, - int ** cellsOnCell, int ** edgesOnCell, int ** verticesOnCell, - int ** cellsOnEdge, int ** verticesOnEdge, int ** edgesOnEdge, - int ** edgesOnVertex, int ** cellsOnVertex, double ** kiteAreasOnVertex, - double * fEdge, double * fVertex, double * dvEdge, double * dcEdge, double * areaCell, double * areaTriangle, double * angleEdge, - double ** weightsOnEdge); - -Point segment_intersect(Point& p0, Point &p1, Point &q0, Point&q1); - -int main(int argc, char ** argv) -{ - int i, j, k, ii, jj; -// DensityFunction f; -// PointSet out_pset; -// Point * cells; - Point * temp_p; -// Point * temp_pp; -// Point p3; - Triangle t; - Point p; -// vector * clist; -// vector * triangulation; -// vector::iterator it; -// set delaunay_tri; -// set::iterator dti; -// list norm_dt; -// list::iterator norm_dti; - vector< set > cellsOnCell_temp; - vector< vector > cellsOnVertex_v; - vector< vector > edgesOnVertex_v; - vector< vector > verticesOnCell_v; - vector< vector > cellsOnCell_v; - vector< vector > cellsOnEdge_v; - vector< vector > verticesOnEdge_v; - vector< vector > edgesOnCell_v; - vector areaCell_v, areaTriangle_v; - vector dcEdge_v, dvEdge_v; - vector angleEdge_v; - vector nEdgesOnCell_v; - vector cells_v; - vector vertices_v; - vector edges_v; - vector edge_segments; - set::iterator cell_iter; /* TESTING CODE */ -// vector< vector > cv_on_cell; /* TESTING CODE */ -// Triangle * tri; -// double xcell, ycell; - double x, y; -// double total_mass, mass; -// FILE * restart; - int nCells, nVertices, nEdges, vertexDegree; - double *xCell, *yCell, *zCell, *xVertex, *yVertex, *zVertex, *meshDensity; - int *cellsOnVertex; - double x_period, y_period; - - - /* - * Read basic grid info from NetCDF file - */ - read_netcdf(&nCells, &nVertices, &vertexDegree, &xCell, &yCell, &zCell, &xVertex, 
&yVertex, &zVertex, &meshDensity, &cellsOnVertex, &x_period, &y_period); - - cout << "Read from input file:" << endl; - cout << " nCells = " << nCells << endl; - cout << " nVertices = " << nVertices << endl; - cout << " vertexDegree = " << vertexDegree << endl; - cout << " x_period = " << x_period << endl; - cout << " y_period = " << y_period << endl; - cout << endl; - - /* - * vector of cells - */ - cells_v.resize(nCells); - for (i=0; isetNum(i); - cells_v[i] = *temp_p; - } - - - /* - * vector of vertices - */ - vertices_v.resize(nVertices); - for (i=0; isetNum(i); - vertices_v[i] = *temp_p; - } - - - /* - * cellsOnVertex - */ - cellsOnVertex_v.resize(nVertices); - for (i=0; i $*.f90 - $(FC) $(FFLAGS) -c $*.f90 $(INCLUDES) - $(RM) $*.f90 diff --git a/grid_gen/periodic_hex_minimal/README b/grid_gen/periodic_hex_minimal/README deleted file mode 100644 index 99a7f9817..000000000 --- a/grid_gen/periodic_hex_minimal/README +++ /dev/null @@ -1,16 +0,0 @@ - -This dir contains a version of the orginal periodic hex code that creats a "minimal" grid.nc file -to be then processed using the mesh convergion tool ("mpas_mesh_converter.cpp" in the -"grid_gen/mesh_conversion_tools/" subdir). E.g., invoking ... - -> mpas_mesh_converter ./grid.nc - -...will create a full mpas mesh called "mesh.nc". - -These alterations were made to greatly speed up the mesh generation process for large meshes. In -periodic_grid.F, only the necessary fields are constructed and written to netcdf, and these are -allocated, written, and deallocated in sequence to minimze the memory footprint. In -module_write_netcdf.F, the minimal number of fields are included and all fields are optional so -that the netcdf write command can be called as many times as needed in periodic_grid.F. - -S. Price, 6-11-15 diff --git a/grid_gen/periodic_hex_minimal/module_cell_indexing.F b/grid_gen/periodic_hex_minimal/module_cell_indexing.F deleted file mode 100644 index b360cf6ff..000000000 --- a/grid_gen/periodic_hex_minimal/module_cell_indexing.F +++ /dev/null @@ -1,170 +0,0 @@ -module cell_indexing - -! this subroutine provide index mapping for hexagon meshes dimensioned (nx, ny) - - integer, parameter :: maxEdges = 6 - - integer :: nx, ny, nVertLevels, nTracers, vertexDegree - real (kind=8) :: dc - integer, dimension(20) :: nproc - - - contains - - - subroutine cell_indexing_read_nl() - - implicit none - - namelist /periodic_grid/ nx, ny, dc, nVertLevels, nTracers, nproc, vertexDegree - - nx = 200 - ny = 200 - dc = 10000. - nVertLevels = 1 - nTracers = 2 - nproc(:) = -1 - vertexDegree = 3 - - open(20,file='namelist.input',status='old') - read(20,periodic_grid) - close(20) - - if (mod(ny, 2) /= 0) then - print *, "Error: ny must be divisible by 2 for the grid's periodicity to work properly." - print *, "Please adjust ny in your namelist file and rerun the program." 
- call exit() - endif - - end subroutine cell_indexing_read_nl - - - subroutine cellColRow(idx, iCol, iRow) - - implicit none - - integer, intent(in) :: idx - integer, intent(out) :: iCol, iRow - - iRow = ((idx-1) / nx) + 1 - iCol = mod((idx-1), nx) + 1 - - end subroutine cellColRow - - - integer function cellIdx(iCol, iRow) - - implicit none - - integer, intent(in) :: iCol, iRow - - cellIdx = (iRow-1)*nx + iCol - - end function cellIdx - - - integer function cellOnCell(iCol, iRow, neighborNumber) - - implicit none - - integer, intent(in) :: iCol, iRow, neighborNumber - - integer :: mx, px, my, py - - mx = iCol - 1 - if (mx == 0) mx = nx - my = iRow - 1 - if (my == 0) my = ny - px = iCol + 1 - if (px == nx + 1) px = 1 - py = iRow + 1 - if (py == ny + 1) py = 1 - - if (mod(iRow,2) == 1) then - if (neighborNumber == 1) then - cellOnCell = cellIdx(mx, iRow) - else if (neighborNumber == 2) then - cellOnCell = cellIdx(mx, my) - else if (neighborNumber == 3) then - cellOnCell = cellIdx(iCol, my) - else if (neighborNumber == 4) then - cellOnCell = cellIdx(px, iRow) - else if (neighborNumber == 5) then - cellOnCell = cellIdx(iCol, py) - else if (neighborNumber == 6) then - cellOnCell = cellIdx(mx, py) - end if - else - if (neighborNumber == 1) then - cellOnCell = cellIdx(mx, iRow) - else if (neighborNumber == 2) then - cellOnCell = cellIdx(iCol, my) - else if (neighborNumber == 3) then - cellOnCell = cellIdx(px, my) - else if (neighborNumber == 4) then - cellOnCell = cellIdx(px, iRow) - else if (neighborNumber == 5) then - cellOnCell = cellIdx(px, py) - else if (neighborNumber == 6) then - cellOnCell = cellIdx(iCol, py) - end if - end if - - end function cellOnCell - - - integer function edgeOnCell(iCell, neighborNumber) - - implicit none - - integer, intent(in) :: iCell, neighborNumber - - integer :: myRow, myCol - - call cellColRow(iCell, myCol, myRow) - - if (neighborNumber == 1) then - edgeOnCell = 3*(iCell - 1) + 1 - else if (neighborNumber == 2) then - edgeOnCell = 3*(iCell - 1) + 2 - else if (neighborNumber == 3) then - edgeOnCell = 3*(iCell - 1) + 3 - else if (neighborNumber == 4) then - edgeOnCell = 3*(cellOnCell(myCol, myRow, 4) - 1) + 1 - else if (neighborNumber == 5) then - edgeOnCell = 3*(cellOnCell(myCol, myRow, 5) - 1) + 2 - else if (neighborNumber == 6) then - edgeOnCell = 3*(cellOnCell(myCol, myRow, 6) - 1) + 3 - end if - - end function edgeOnCell - - - integer function vertexOnCell(iCell, neighborNumber) - - implicit none - - integer, intent(in) :: iCell, neighborNumber - - integer :: myRow, myCol - - call cellColRow(iCell, myCol, myRow) - - if (neighborNumber == 1) then - vertexOnCell = 2*(iCell - 1) + 1 - else if (neighborNumber == 2) then - vertexOnCell = 2*(iCell - 1) + 2 - else if (neighborNumber == 3) then - vertexOnCell = 2*(cellOnCell(myCol, myRow, 3) - 1) + 1 - else if (neighborNumber == 4) then - vertexOnCell = 2*(cellOnCell(myCol, myRow, 4) - 1) + 2 - else if (neighborNumber == 5) then - vertexOnCell = 2*(cellOnCell(myCol, myRow, 4) - 1) + 1 - else if (neighborNumber == 6) then - vertexOnCell = 2*(cellOnCell(myCol, myRow, 5) - 1) + 2 - end if - - end function vertexOnCell - - -end module cell_indexing diff --git a/grid_gen/periodic_hex_minimal/module_write_netcdf.F b/grid_gen/periodic_hex_minimal/module_write_netcdf.F deleted file mode 100644 index b8cbee3a9..000000000 --- a/grid_gen/periodic_hex_minimal/module_write_netcdf.F +++ /dev/null @@ -1,211 +0,0 @@ -module write_netcdf - - integer :: wr_ncid - integer :: wrDimIDnCells - integer :: wrDimIDnEdges - integer :: 
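# Illustrative sketch (not from the patch): the hexagonal index mapping in
# module_cell_indexing.F above, re-expressed in Python. The arithmetic mirrors
# cellColRow() and cellIdx() exactly, with 1-based indices as in the Fortran.
def cell_col_row(idx, nx):
    """Return (iCol, iRow) of 1-based cell index idx on a mesh nx cells wide."""
    i_row = (idx - 1) // nx + 1
    i_col = (idx - 1) % nx + 1
    return i_col, i_row

def cell_idx(i_col, i_row, nx):
    """Inverse mapping: 1-based cell index from 1-based (iCol, iRow)."""
    return (i_row - 1) * nx + i_col

# Round-trip check on a hypothetical mesh 4 cells wide: cell 6 is column 2, row 2.
assert cell_col_row(6, nx=4) == (2, 2)
assert cell_idx(2, 2, nx=4) == 6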
wrDimIDnVertices - integer :: wrDimIDmaxEdges - integer :: wrDimIDmaxEdges2 - integer :: wrDimIDTWO - integer :: wrDimIDvertexDegree - integer :: wrVarIDxCell - integer :: wrVarIDyCell - integer :: wrVarIDzCell - integer :: wrVarIDxVertex - integer :: wrVarIDyVertex - integer :: wrVarIDzVertex - integer :: wrVarIDcellsOnVertex - - integer :: wrLocalnCells - integer :: wrLocalnVertices - - contains - - subroutine write_netcdf_init( & - nCells, & - nEdges, & - nVertices, & - maxEdges, & - vertexDegree, & - dc, & - nx, & - ny ) - - implicit none - - include 'netcdf.inc' - - integer, intent(in) :: nCells - integer, intent(in) :: nEdges - integer, intent(in) :: nVertices - integer, intent(in) :: maxEdges - integer, intent(in) :: vertexDegree - real (kind=8), intent(in) :: dc - integer, intent(in) :: nx - integer, intent(in) :: ny - - integer :: nferr - integer, dimension(10) :: dimlist - character (len=16) :: on_a_sphere - character (len=16) :: is_periodic - real (kind=8) :: sphere_radius - real (kind=8) :: x_period, y_period - - - wrLocalnCells = nCells - wrLocalnVertices = nVertices - - on_a_sphere = 'NO' - is_periodic = 'YES' - sphere_radius = 0.0 - x_period = (nx) * dc - y_period = (ny) * (dc * sqrt(3.0)) / 2.0 - - nferr = nf_create('grid.nc', IOR(NF_CLOBBER,NF_64BIT_OFFSET), wr_ncid) - - ! - ! Define dimensions - ! - nferr = nf_def_dim(wr_ncid, 'nCells', nCells, wrDimIDnCells) - nferr = nf_def_dim(wr_ncid, 'nEdges', nEdges, wrDimIDnEdges) - nferr = nf_def_dim(wr_ncid, 'nVertices', nVertices, wrDimIDnVertices) - nferr = nf_def_dim(wr_ncid, 'maxEdges', maxEdges, wrDimIDmaxEdges) - nferr = nf_def_dim(wr_ncid, 'maxEdges2', 2*maxEdges, wrDimIDmaxEdges2) - nferr = nf_def_dim(wr_ncid, 'TWO', 2, wrDimIDTWO) - nferr = nf_def_dim(wr_ncid, 'vertexDegree', vertexDegree, wrDimIDvertexDegree) - - - ! - ! Define attributes - ! - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'on_a_sphere', 16, on_a_sphere) - nferr = nf_put_att_text(wr_ncid, NF_GLOBAL, 'is_periodic', 16, is_periodic) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'sphere_radius', NF_DOUBLE, 1, sphere_radius) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'x_period', NF_DOUBLE, 1, x_period) - nferr = nf_put_att_double(wr_ncid, NF_GLOBAL, 'y_period', NF_DOUBLE, 1, y_period) - - - ! - ! Define variables - ! 
- dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'xCell', NF_DOUBLE, 1, dimlist, wrVarIDxCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'yCell', NF_DOUBLE, 1, dimlist, wrVarIDyCell) - dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'zCell', NF_DOUBLE, 1, dimlist, wrVarIDzCell) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'xVertex', NF_DOUBLE, 1, dimlist, wrVarIDxVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'yVertex', NF_DOUBLE, 1, dimlist, wrVarIDyVertex) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'zVertex', NF_DOUBLE, 1, dimlist, wrVarIDzVertex) - dimlist( 1) = wrDimIDvertexDegree - dimlist( 2) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'cellsOnVertex', NF_INT, 2, dimlist, wrVarIDcellsOnVertex) - - nferr = nf_enddef(wr_ncid) - - end subroutine write_netcdf_init - - - subroutine write_netcdf_fields( & - xCell, & - yCell, & - zCell, & - xVertex, & - yVertex, & - zVertex, & - cellsOnVertex ) - - implicit none - - include 'netcdf.inc' - - real (kind=8), dimension(:), optional, intent(in) :: xCell - real (kind=8), dimension(:), optional, intent(in) :: yCell - real (kind=8), dimension(:), optional, intent(in) :: zCell - real (kind=8), dimension(:), optional, intent(in) :: xVertex - real (kind=8), dimension(:), optional, intent(in) :: yVertex - real (kind=8), dimension(:), optional, intent(in) :: zVertex - integer, dimension(:,:), optional, intent(in) :: cellsOnVertex - - integer :: nferr - integer, dimension(1) :: start1, count1 - integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 - - start1(1) = 1 - - start2(1) = 1 - start2(2) = 1 - - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start4(1) = 1 - start4(2) = 1 - start4(3) = 1 - start4(4) = 1 - - if(present(xCell))then - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDxCell, start1, count1, xCell) - endif - - if(present(yCell))then - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDyCell, start1, count1, yCell) - endif - - if(present(zCell))then - start1(1) = 1 - count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDzCell, start1, count1, zCell) - endif - - if(present(xVertex))then - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDxVertex, start1, count1, xVertex) - endif - - if(present(yVertex))then - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDyVertex, start1, count1, yVertex) - endif - - if(present(zVertex))then - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDzVertex, start1, count1, zVertex) - endif - - if(present(cellsOnVertex))then - start2(2) = 1 - count2( 1) = 3 - count2( 2) = wrLocalnVertices - nferr = nf_put_vara_int(wr_ncid, wrVarIDcellsOnVertex, start2, count2, cellsOnVertex) - endif - - end subroutine write_netcdf_fields - - - subroutine write_netcdf_finalize() - - implicit none - - include 'netcdf.inc' - - integer :: nferr - - nferr = nf_close(wr_ncid) - - end subroutine write_netcdf_finalize - -end module write_netcdf diff --git a/grid_gen/periodic_hex_minimal/namelist.input b/grid_gen/periodic_hex_minimal/namelist.input deleted file mode 100644 index 6faa7f857..000000000 --- a/grid_gen/periodic_hex_minimal/namelist.input +++ /dev/null @@ -1,5 +0,0 @@ -&periodic_grid - nx = 500, - ny = 500, - dc = 10000., -/ diff --git 
a/grid_gen/periodic_hex_minimal/periodic_grid.F b/grid_gen/periodic_hex_minimal/periodic_grid.F deleted file mode 100644 index 53caa2e41..000000000 --- a/grid_gen/periodic_hex_minimal/periodic_grid.F +++ /dev/null @@ -1,103 +0,0 @@ -program hexagonal_periodic_grid - - use cell_indexing - use write_netcdf - - implicit none - - real (kind=8), parameter :: pi = 3.141592653589793 - real (kind=8), parameter :: ONE = 1.0_8 - real (kind=8), parameter :: TWO = 2.0_8 - real (kind=8), parameter :: THREE = 3.0_8 - real (kind=8), parameter :: FOUR = 4.0_8 - real (kind=8), parameter :: SIX = 6.0_8 - - integer, allocatable, dimension(:,:) :: verticesOnCell, cellsOnVertex - real (kind=8), allocatable, dimension(:) :: xCell, yCell, zCell - real (kind=8), allocatable, dimension(:) :: xVertex, yVertex, zVertex - - integer :: i, j, np, iCell - integer :: nCells, nEdges, nVertices - integer :: iRow, iCol, ii, jj - integer :: nprocx, nprocy - real (kind=8) :: r - character (len=32) :: decomp_fname - - call cell_indexing_read_nl() - - nCells = nx*ny - nEdges = 3*nCells - nVertices = 2*nCells - - call write_netcdf_init( nCells, nEdges, nVertices, maxEdges, vertexDegree, dc, nx, ny ) - - allocate(verticesOnCell(maxEdges, nCells)) - - do iRow = 1, ny - do iCol = 1, nx - iCell = cellIdx(iCol,iRow) - do j=1,maxEdges - verticesOnCell(j,iCell) = vertexOnCell(iCell,j) - end do - end do - end do - - allocate(cellsOnVertex(3,nVertices)) - - do iRow = 1, ny - do iCol = 1, nx - iCell = cellIdx(iCol,iRow) - cellsOnVertex(3,verticesOnCell(2,iCell)) = iCell - cellsOnVertex(1,verticesOnCell(4,iCell)) = iCell - cellsOnVertex(2,verticesOnCell(6,iCell)) = iCell - cellsOnVertex(1,verticesOnCell(1,iCell)) = iCell - cellsOnVertex(2,verticesOnCell(3,iCell)) = iCell - cellsOnVertex(3,verticesOnCell(5,iCell)) = iCell - end do - end do - - call write_netcdf_fields( cellsOnVertex=cellsOnVertex ) - - allocate(xCell(nCells)) - allocate(yCell(nCells)) - allocate(zCell(nCells)) - allocate(xVertex(nVertices)) - allocate(yVertex(nVertices)) - allocate(zVertex(nVertices)) - - do iRow = 1, ny - do iCol = 1, nx - iCell = cellIdx(iCol, iRow) - if (mod(iRow,2) == 1) then - xCell(iCell) = dc*real(iCol) - 0.5*dc - yCell(iCell) = dc*real(iRow)*sqrt(THREE) / TWO - zCell(iCell) = 0.0 - else - xCell(iCell) = dc*real(iCol) - yCell(iCell) = dc*real(iRow)*sqrt(THREE) / TWO - zCell(iCell) = 0.0 - end if - xVertex(verticesOnCell(1,iCell)) = xCell(iCell) - 0.5*dc - yVertex(verticesOnCell(1,iCell)) = yCell(iCell) + dc * sqrt(THREE) / SIX - zVertex(verticesOnCell(1,iCell)) = 0.0 - xVertex(verticesOnCell(2,iCell)) = xCell(iCell) - 0.5*dc - yVertex(verticesOnCell(2,iCell)) = yCell(iCell) - dc * sqrt(THREE) / SIX - zVertex(verticesOnCell(2,iCell)) = 0.0 - end do - end do - - deallocate(verticesOnCell) - - call write_netcdf_fields( xCell=xCell, yCell=yCell, zCell=zCell ) - deallocate(xCell) - deallocate(yCell) - deallocate(zCell) - - call write_netcdf_fields( xVertex=xVertex, yVertex=yVertex, zVertex=zVertex ) - deallocate(xVertex) - deallocate(yVertex) - deallocate(zVertex) - - call write_netcdf_finalize() - -end program hexagonal_periodic_grid diff --git a/grid_gen/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py b/landice/mesh_tools_li/conversion_exodus_init_to_mpasli_mesh.py similarity index 90% rename from grid_gen/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py rename to landice/mesh_tools_li/conversion_exodus_init_to_mpasli_mesh.py index 2b990db58..7956c3dd7 100755 --- 
a/grid_gen/landice_grid_tools/conversion_exodus_init_to_mpasli_mesh.py +++ b/landice/mesh_tools_li/conversion_exodus_init_to_mpasli_mesh.py @@ -1,10 +1,14 @@ #!/usr/bin/env python """ +Script to convert Albany-Land Ice output file in Exodus format to an MPAS-Land Ice format mesh. + Created on Tue Feb 13 23:50:20 2018 @author: Tong Zhang, Matt Hoffman """ +from __future__ import absolute_import, division, print_function, unicode_literals + import numpy as np from netCDF4 import Dataset from optparse import OptionParser @@ -61,20 +65,20 @@ # change the unit of the exo coord data from km to m. Be careful if it changes in the future if ordering == 1.0: - print "column wise pattern" + print("column wise pattern") layer_num = int(stride) data_exo_layer = data_exo[::layer_num] x_exo_layer = x_exo[::layer_num] y_exo_layer = y_exo[::layer_num] elif ordering == 0.0: - print "layer wise pattern" + print("layer wise pattern") node_num = int(stride) data_exo_layer = data_exo[0:node_num+1] x_exo_layer = x_exo[0:node_num+1] y_exo_layer = y_exo[0:node_num+1] - layer_num = int(len(data_exo)/node_num) + layer_num = len(data_exo)//node_num else: - print "The ordering is probably wrong" + print("The ordering is probably wrong") # slice the exo data to get the MPAS data node_num_layer = len(x_exo_layer) @@ -83,7 +87,7 @@ # set beta value to some uniform value before we put new data in it if (options.conversion_method == 'coord'): - print "use coordinate method" + print("use coordinate method") for i in range(node_num_layer): index_x, = np.where(abs(x[:]-x_exo_layer[i])/(abs(x[:])+1e-10)<1e-3) index_y, = np.where(abs(y[:]-y_exo_layer[i])/(abs(y[:])+1e-10)<1e-3) @@ -98,7 +102,7 @@ # This method may fail at the point where x or y = 0, while x_exo or y_exo is not elif (options.conversion_method == 'id'): - print "use global id method. Need a global id file" + print("use global id method. Need a global id file") usefullCellID = np.loadtxt(options.id_file,dtype='i') usefullCellID_array = usefullCellID[1::] # The first number in the file is the total number. skip it @@ -112,7 +116,7 @@ else: sys.exit("wrong conversion method! Set option m as id or coord!") -print "Successful in converting data from Exodus to MPAS!" +print("Successful in converting data from Exodus to MPAS!") nCells = len(dataset.dimensions['nCells']) thickness = dataset.variables['thickness'][0,:] @@ -141,10 +145,10 @@ # 5) Update mask # 6) go to step 1) -print "\nStart extrapolation!" +print("\nStart extrapolation!") while np.count_nonzero(keepCellMask) != nCells: - + keepCellMask = np.copy(keepCellMaskNew) searchCells = np.where(keepCellMask==0)[0] @@ -164,7 +168,7 @@ dataset.variables[options.var_name][0,iCell] = sum(dataset.variables[options.var_name][0,nonzero_id])/nonzero_num keepCellMask[iCell] = 1 - print ("{0:8d} cells left for extrapolation in total {1:8d} cells".format(nCells-np.count_nonzero(keepCellMask), nCells)) + print("{0:8d} cells left for extrapolation in total {1:8d} cells".format(nCells-np.count_nonzero(keepCellMask), nCells)) else: @@ -200,10 +204,10 @@ keepCellMaskNew[iCell] = 1 - print ("{0:8d} cells left for extrapolation in total {1:8d} cells".format(nCells-np.count_nonzero(keepCellMask), nCells)) + print("{0:8d} cells left for extrapolation in total {1:8d} cells".format(nCells-np.count_nonzero(keepCellMask), nCells)) -print "\nStart idw smoothing for extrapolated field!" 
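# Illustrative sketch (not from the patch) of the extrapolation loop above: unmasked
# cells are filled with the mean of neighbors that already hold data, sweeping until
# every cell is covered (assumes a connected mesh so the loop terminates). The
# connectivity names mirror the MPAS arrays; any toy inputs would be hypothetical.
import numpy as np

def extrapolate(values, keep_mask, cells_on_cell, n_edges_on_cell):
    values, keep_mask = values.copy(), keep_mask.copy()
    while np.count_nonzero(keep_mask) != len(values):
        new_mask = keep_mask.copy()
        for i_cell in np.where(keep_mask == 0)[0]:
            neighbors = cells_on_cell[i_cell, :n_edges_on_cell[i_cell]] - 1  # 1-based -> 0-based
            filled = neighbors[keep_mask[neighbors] == 1]
            if filled.size > 0:
                values[i_cell] = values[filled].mean()
                new_mask[i_cell] = 1
        keep_mask = new_mask
    return values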
+print("\nStart idw smoothing for extrapolated field!") iter_num = 0 while iter_num < int(options.smooth_iter_num): @@ -226,7 +230,7 @@ dataset.variables[options.var_name][0,iCell] = sum(dataset.variables[options.var_name][0,nonzero_id])/nonzero_num - print ("{0:3d} smoothing in total {1:3s} iters".format(iter_num, options.smooth_iter_num)) + print("{0:3d} smoothing in total {1:3s} iters".format(iter_num, options.smooth_iter_num)) else: @@ -254,13 +258,13 @@ var_interp = 1.0/sum(1.0/ds)*sum(1.0/ds*var_adj) dataset.variables[options.var_name][0,iCell] = var_interp - print ("{0:3d} smoothing in total {1:3s} iters".format(iter_num, options.smooth_iter_num)) + print("{0:3d} smoothing in total {1:3s} iters".format(iter_num, options.smooth_iter_num)) iter_num = iter_num + 1 if iter_num == 0: - print "\nNo smoothing! Iter number is 0!" + print("\nNo smoothing! Iter number is 0!") -print "\nExtrapolation and smoothing finished!" +print("\nExtrapolation and smoothing finished!") dataset.close() diff --git a/grid_gen/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py similarity index 81% rename from grid_gen/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py rename to landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py index c50c71b37..65276e3a9 100755 --- a/grid_gen/landice_grid_tools/create_landice_grid_from_generic_MPAS_grid.py +++ b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py @@ -1,7 +1,12 @@ #!/usr/bin/env python -# Script to create a grid with land ice variables from an MPAS grid. -# I've only tested it with a periodic_hex grid, but it should work with any MPAS grid. -# Currently variable attributes are not copied (and periodic_hex does not assign any, so this is ok). If variable attributes are added to periodic_hex, this script should be modified to copy them (looping over dir(var), skipping over variable function names "assignValue", "getValue", "typecode"). +""" +Script to create a grid with land ice variables from an MPAS grid. +Currently variable attributes are not copied. +This script could be modified to copy them (looping over dir(var), skipping over variable function names "assignValue", "getValue", "typecode"). +""" + +from __future__ import absolute_import, division, print_function, \ + unicode_literals import sys, numpy from netCDF4 import Dataset @@ -11,13 +16,14 @@ sphere_radius = 6.37122e6 # earth radius, if needed -print "** Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("** Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser() parser.add_option("-i", "--in", dest="fileinName", help="input filename. Defaults to 'grid.nc'", metavar="FILENAME") parser.add_option("-o", "--out", dest="fileoutName", help="output filename. Defaults to 'landice_grid.nc'", metavar="FILENAME") parser.add_option("-l", "--level", dest="levels", help="Number of vertical levels to use in the output file. Defaults to the number in the input file", metavar="FILENAME") parser.add_option("-v", "--vert", dest="vertMethod", help="Method of vertical layer spacing: uniform, glimmer. Glimmer spacing follows Eq. 35 of Rutt, I. C., M. Hagdorn, N. R. J. Hulton, and A. J. Payne (2009), The Glimmer community ice sheet model, J. Geophys. 
Res., 114, F02004, doi:10.1029/2008JF001015", default='uniform', metavar="FILENAME") parser.add_option("--beta", dest="beta", action="store_true", help="Use this flag to include the field 'beta' in the resulting file.") +parser.add_option("--effecpress", dest="effecpress", action="store_true", help="Use this flag to include the field 'effectivePressure' in the resulting file.") parser.add_option("--diri", dest="dirichlet", action="store_true", help="Use this flag to include the fields 'dirichletVelocityMask', 'uReconstructX', 'uReconstructY' needed for specifying Dirichlet velocity boundary conditions in the resulting file.") parser.add_option("--thermal", dest="thermal", action="store_true", help="Use this flag to include the fields 'temperature', 'surfaceAirTemperature', 'basalHeatFlux' needed for specifying thermal initial conditions in the resulting file.") parser.add_option("--hydro", dest="hydro", action="store_true", help="Use this flag to include the fields 'waterThickness', 'tillWaterThickness', 'basalMeltInput', 'externalWaterInput', 'frictionAngle', 'waterPressure', 'waterFluxMask' needed for specifying hydro initial conditions in the resulting file.") @@ -25,14 +31,14 @@ options, args = parser.parse_args() if not options.fileinName: - print "No input filename specified, so using 'grid.nc'." + print("No input filename specified, so using 'grid.nc'.") options.fileinName = 'grid.nc' else: - print "Input file is:", options.fileinName + print("Input file is: {}".format(options.fileinName)) if not options.fileoutName: - print "No output filename specified, so using 'landice_grid.nc'." + print("No output filename specified, so using 'landice_grid.nc'.") options.fileoutName = 'landice_grid.nc' -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output # Get the input file filein = Dataset(options.fileinName,'r') @@ -46,12 +52,12 @@ # ============================================ # Do this first as doing it last is slow for big files since adding # attributes forces the contents to get reorganized. -print "---- Copying global attributes from input file to output file ----" +print("---- Copying global attributes from input file to output file ----") for name in filein.ncattrs(): # sphere radius needs to be set to that of the earth if on a sphere if name == 'sphere_radius' and getattr(filein, 'on_a_sphere') == "YES ": setattr(fileout, 'sphere_radius', sphere_radius) - print 'Set global attribute sphere_radius = ', str(sphere_radius) + print('Set global attribute sphere_radius = {}'.format(sphere_radius)) elif name =='history': # Update history attribute of netCDF file newhist = '\n'.join([getattr(filein, 'history'), ' '.join(sys.argv[:]) ] ) @@ -59,14 +65,14 @@ else: # Otherwise simply copy the attr setattr(fileout, name, getattr(filein, name) ) - print 'Copied global attribute ', name, '=', getattr(filein, name) + print('Copied global attribute {} = {}'.format(name, getattr(filein, name))) # Update history attribute of netCDF file if we didn't above if not hasattr(fileout, 'history'): setattr(fileout, 'history', sys.argv[:] ) fileout.sync() -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output # ============================================ @@ -77,7 +83,7 @@ # It may be better to list them explicitly as I do for the grid variables, # but this way ensures they all get included and is easier. 
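# Minimal sketch of the copy pattern used in this script (global attributes first,
# then dimensions) with netCDF4; the default file names 'grid.nc' and
# 'landice_grid.nc' are assumed, as in the options above.
from netCDF4 import Dataset

src = Dataset('grid.nc', 'r')
dst = Dataset('landice_grid.nc', 'w')
for name in src.ncattrs():
    setattr(dst, name, getattr(src, name))
for dim, d in src.dimensions.items():
    dst.createDimension(dim, None if d.isunlimited() else len(d))
dst.sync()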
# Note: The UNLIMITED time dimension will return a dimension value of None with Scientific.IO. This is what is supposed to happen. See below for how to deal with assigning values to a variable with a unlimited dimension. Special handling is needed with the netCDF module. -print "---- Copying dimensions from input file to output file ----" +print("---- Copying dimensions from input file to output file ----") for dim in filein.dimensions.keys(): if dim == 'nTracers': pass # Do nothing - we don't want this dimension @@ -90,12 +96,12 @@ if options.levels is None: # If nVertLevels is in the input file, and a value for it was not # specified on the command line, then use the value from the file (do nothing here) - print "Using nVertLevels from the intput file:", len(filein.dimensions[dim]) + print("Using nVertLevels from the intput file: {}".format(len(filein.dimensions[dim]))) dimvalue = len(filein.dimensions[dim]) else: # if nVertLevels is in the input file, but a value WAS specified # on the command line, then use the command line value - print "Using nVertLevels specified on the command line:", int(options.levels) + print("Using nVertLevels specified on the command line: {}".format(int(options.levels))) dimvalue = int(options.levels) else: dimvalue = len(filein.dimensions[dim]) @@ -104,22 +110,22 @@ # it has not been added to the output file yet. Treat those here. if 'nVertLevels' not in fileout.dimensions: if options.levels is None: - print "nVertLevels not in input file and not specified. Using default value of 10." + print("nVertLevels not in input file and not specified. Using default value of 10.") fileout.createDimension('nVertLevels', 10) else: - print "Using nVertLevels specified on the command line:", int(options.levels) + print("Using nVertLevels specified on the command line: {}".format(int(options.levels))) fileout.createDimension('nVertLevels', int(options.levels)) # Also create the nVertInterfaces dimension, even if none of the variables require it. fileout.createDimension('nVertInterfaces', len(fileout.dimensions['nVertLevels']) + 1) # nVertInterfaces = nVertLevels + 1 -print 'Added new dimension nVertInterfaces to output file with value of ' + str(len(fileout.dimensions['nVertInterfaces'])) + '.' +print('Added new dimension nVertInterfaces to output file with value of {}.'.format(len(fileout.dimensions['nVertInterfaces']))) fileout.sync() -print 'Finished creating dimensions in output file.\n' # include an extra blank line here +print('Finished creating dimensions in output file.\n') # include an extra blank line here # ============================================ # Copy over all of the required grid variables to the new file # ============================================ -print "Beginning to copy mesh variables to output file." 
+print("Beginning to copy mesh variables to output file.") vars2copy = ['latCell', 'lonCell', 'xCell', 'yCell', 'zCell', 'indexToCellID', 'latEdge', 'lonEdge', 'xEdge', 'yEdge', 'zEdge', 'indexToEdgeID', 'latVertex', 'lonVertex', 'xVertex', 'yVertex', 'zVertex', 'indexToVertexID', 'cellsOnEdge', 'nEdgesOnCell', 'nEdgesOnEdge', 'edgesOnCell', 'edgesOnEdge', 'weightsOnEdge', 'dvEdge', 'dcEdge', 'angleEdge', 'areaCell', 'areaTriangle', 'cellsOnCell', 'verticesOnCell', 'verticesOnEdge', 'edgesOnVertex', 'cellsOnVertex', 'kiteAreasOnVertex'] # Add these optional fields if they exist in the input file for optionalVar in ['meshDensity', 'gridSpacing', 'cellQuality', 'triangleQuality', 'triangleAngleQuality', 'obtuseTriangle']: @@ -127,8 +133,8 @@ vars2copy.append(optionalVar) for varname in vars2copy: - print "-", -print "|" + print("- ", end='') +print("|") for varname in vars2copy: thevar = filein.variables[varname] datatype = thevar.dtype @@ -145,8 +151,8 @@ del newVar, thevar sys.stdout.write("* "); sys.stdout.flush() fileout.sync() -print "|" -print "Finished copying mesh variables to output file.\n" +print("|") +print("Finished copying mesh variables to output file.\n") # ============================================ # Create the land ice variables (all the shallow water vars in the input file can be ignored) @@ -169,7 +175,7 @@ layerInterfaces[k] = 4.0/3.0 * (1.0 - ((k+1.0-1.0)/(nInterfaces-1.0) + 1.0)**-2) for k in range(nVertLevels): layerThicknessFractionsData[k] = layerInterfaces[k+1] - layerInterfaces[k] - print "Setting layerThicknessFractions to:", layerThicknessFractionData + print("Setting layerThicknessFractions to: {}".format(layerThicknessFractionsData)) else: sys.exit('Unknown method for vertical spacing method (--vert): '+options.vertMethod) @@ -191,12 +197,17 @@ newvar[:] = numpy.zeros(newvar.shape) newvar = fileout.createVariable('floatingBasalMassBal', datatype, ('Time', 'nCells')) newvar[:] = numpy.zeros(newvar.shape) -print 'Added default variables: thickness, temperature, bedTopography, sfcMassBal, floatingBasalMassBal' +print('Added default variables: thickness, temperature, bedTopography, sfcMassBal, floatingBasalMassBal') if options.beta: newvar = fileout.createVariable('beta', datatype, ('Time', 'nCells')) newvar[:] = 1.0e8 # Give a default beta that won't have much sliding. - print 'Added optional variable: beta' + print('Added optional variable: beta') + +if options.effecpress: + newvar = fileout.createVariable('effectivePressure', datatype, ('Time', 'nCells')) + newvar[:] = 1.0e8 # Give a default effective pressure that won't have much sliding. 
+ print('Added optional variable: effectivePressure') if options.dirichlet: newvar = fileout.createVariable('dirichletVelocityMask', datatypeInt, ('Time', 'nCells', 'nVertInterfaces')) @@ -205,7 +216,7 @@ newvar[:] = 0.0 newvar = fileout.createVariable('uReconstructY', datatype, ('Time', 'nCells', 'nVertInterfaces',)) newvar[:] = 0.0 - print 'Added optional dirichlet variables: dirichletVelocityMask, uReconstructX, uReconstructY' + print('Added optional dirichlet variables: dirichletVelocityMask, uReconstructX, uReconstructY') if options.thermal: newvar = fileout.createVariable('temperature', datatype, ('Time', 'nCells', 'nVertLevels')) @@ -214,7 +225,7 @@ newvar[:] = 273.15 # Give default value for temperate ice newvar = fileout.createVariable('basalHeatFlux', datatype, ('Time', 'nCells')) newvar[:] = 0.0 # Default to none (W/m2) - print 'Added optional thermal variables: temperature, surfaceAirTemperature, basalHeatFlux' + print('Added optional thermal variables: temperature, surfaceAirTemperature, basalHeatFlux') if options.hydro: newvar = fileout.createVariable('waterThickness', datatype, ('Time', 'nCells')) @@ -231,7 +242,7 @@ newvar[:] = 0.0 newvar = fileout.createVariable('waterFluxMask', 'i', ('Time', 'nEdges')) newvar[:] = 0.0 - print 'Added optional hydro variables: waterThickness, tillWaterThickness, meltInput, frictionAngle, waterPressure, waterFluxMask' + print('Added optional hydro variables: waterThickness, tillWaterThickness, meltInput, frictionAngle, waterPressure, waterFluxMask') if options.obs: newvar = fileout.createVariable('observedSurfaceVelocityX', datatype, ('Time', 'nCells')) @@ -246,7 +257,7 @@ newvar[:] = 0.0 newvar = fileout.createVariable('thicknessUncertainty', datatype, ('Time', 'nCells')) newvar[:] = 0.0 - print 'Added optional velocity optimization variables: observedSurfaceVelocityX, observedSurfaceVelocityY, observedSurfaceVelocityUncertainty, observedThicknessTendency, observedThicknessTendencyUncertainty, thicknessUncertainty' + print('Added optional velocity optimization variables: observedSurfaceVelocityX, observedSurfaceVelocityY, observedSurfaceVelocityUncertainty, observedThicknessTendency, observedThicknessTendencyUncertainty, thicknessUncertainty') # Update history attribute of netCDF file thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) @@ -256,10 +267,10 @@ newhist = thiscommand setattr(fileout, 'history', newhist ) -print "Completed creating land ice variables in new file. Now syncing to file." +print("Completed creating land ice variables in new file. Now syncing to file.") fileout.sync() filein.close() fileout.close() -print '\n** Successfully created ' + options.fileoutName + '.**' +print('\n** Successfully created {}.**'.format(options.fileoutName)) diff --git a/grid_gen/landice_grid_tools/define_cullMask.py b/landice/mesh_tools_li/define_cullMask.py similarity index 76% rename from grid_gen/landice_grid_tools/define_cullMask.py rename to landice/mesh_tools_li/define_cullMask.py index 6406df82a..18343a871 100755 --- a/grid_gen/landice_grid_tools/define_cullMask.py +++ b/landice/mesh_tools_li/define_cullMask.py @@ -1,6 +1,11 @@ #!/usr/bin/env python -# Script for adding a field named cullMask to an MPAS land ice grid for use with the MpasCellCuller tool that actually culls the unwanted cells. -# Matt Hoffman, February 28, 2013 +""" +Script for adding a field named cullMask to an MPAS land ice grid for use with the MpasCellCuller tool that actually culls the unwanted cells. 
+Matt Hoffman, February 28, 2013 +""" + +from __future__ import absolute_import, division, print_function, \ + unicode_literals import sys import numpy as np @@ -9,17 +14,17 @@ from datetime import datetime -print "** Gathering information." +print("** Gathering information.") parser = OptionParser() parser.add_option("-f", "--file", dest="file", help="grid file to modify; default: landice_grid.nc", metavar="FILE") parser.add_option("-m", "--method", dest="method", help="method to use for marking cells to cull. Supported methods: 'noIce', 'numCells', 'distance', 'radius', 'edgeFraction'", metavar="METHOD") parser.add_option("-n", "--numCells", dest="numCells", default=5, help="number of cells to keep beyond ice extent", metavar="NUM") -parser.add_option("-d", "--distance", dest="distance", default=50, help="distance (km) beyond ice extent to keep", metavar="DIST") +parser.add_option("-d", "--distance", dest="distance", default=50, help="numeric value to use for the various methods: distance method->distance (km), radius method->radius (km), edgeFraction method->fraction of width or height", metavar="DIST") parser.add_option("-p", "--plot", dest="makePlot", help="Include to have the script generate a plot of the resulting mask, default=false", default=False, action="store_true") options, args = parser.parse_args() if not options.file: - print "No grid filename provided. Using landice_grid.nc." + print("No grid filename provided. Using landice_grid.nc.") options.file = "landice_grid.nc" if not options.method: @@ -46,10 +51,10 @@ thicknessMissing = True try: thickness = f.variables['thickness'][0,:] - print 'Using thickness field at time 0' + print('Using thickness field at time 0') thicknessMissing = False except: - print "The field 'thickness' is not available. Some culling methods will not work." + print("The field 'thickness' is not available. Some culling methods will not work.") # ===== Various methods for defining the mask ==== @@ -57,7 +62,7 @@ # ========= # only keep cells with ice if maskmethod == 'noIce': - print "Method: remove cells without ice" + print("Method: remove cells without ice") if thicknessMissing: sys.exit("Unable to perform 'numCells' method because thickness field was missing.") @@ -66,13 +71,13 @@ # ========= # add a buffer of X cells around the ice elif maskmethod == 'numCells': - print "Method: remove cells beyond a certain number of cells from existing ice" + print("Method: remove cells beyond a certain number of cells from existing ice") if thicknessMissing: sys.exit("Unable to perform 'numCells' method because thickness field was missing.") buffersize=int(options.numCells) # number of cells to expand - print "Using a buffer of {} cells".format(buffersize) + print("Using a buffer of {} cells".format(buffersize)) keepCellMask = np.copy(cullCell[:]) keepCellMask[:] = 0 @@ -81,17 +86,17 @@ # mark the cells with ice first keepCellMask[thickness > 0.0] = 1 - print 'Num of cells with ice:', sum(keepCellMask) + print('Num of cells with ice: {}'.format(sum(keepCellMask))) for i in range(buffersize): - print 'Starting buffer loop ', i+1 + print('Starting buffer loop {}'.format(i+1)) keepCellMaskNew = np.copy(keepCellMask) # make a copy to edit that can be edited without changing the original ind = np.nonzero(keepCellMask == 0)[0] for i in range(len(ind)): iCell = ind[i] keepCellMaskNew[iCell] = keepCellMask[cellsOnCell[iCell,:nEdgesOnCell[iCell]]-1].max() # if any neighbor has a value of 1, then 1 will get assigned to iCell. 
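# Illustrative sketch of one buffer pass in the 'numCells' method above: the keep-mask
# is dilated by one ring of neighbors via a max over cellsOnCell (1-based MPAS
# indexing). The 3-cell toy arrays below are hypothetical.
import numpy as np

def dilate_once(keep_mask, cells_on_cell, n_edges_on_cell):
    new_mask = keep_mask.copy()
    for i_cell in np.nonzero(keep_mask == 0)[0]:
        neighbors = cells_on_cell[i_cell, :n_edges_on_cell[i_cell]] - 1
        new_mask[i_cell] = keep_mask[neighbors].max()
    return new_mask

keep = np.array([0, 1, 0])                          # only the middle cell has ice
coc = np.array([[2, 0, 0], [1, 3, 0], [2, 0, 0]])   # padded 1-based neighbor lists
ne = np.array([1, 2, 1])
print(dilate_once(keep, coc, ne))                   # -> [1 1 1]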
keepCellMask = np.copy(keepCellMaskNew) # after we've looped over all cells assign the new mask to the variable we need (either for another loop around the domain or to write out) - print ' Num of cells to keep:', sum(keepCellMask) + print(' Num of cells to keep: {}'.format(sum(keepCellMask))) # Now convert the keepCellMask to the cullMask cullCell[:] = np.absolute(keepCellMask[:]-1) # Flip the mask for which ones to cull @@ -100,13 +105,13 @@ # remove cells beyond a certain distance of ice extent elif maskmethod == 'distance': - print "Method: remove cells beyond a certain distance from existing ice" + print("Method: remove cells beyond a certain distance from existing ice") if thicknessMissing: sys.exit("Unable to perform 'numCells' method because thickness field was missing.") dist=float(options.distance) - print "Using a buffer distance of {} km".format(dist) + print("Using a buffer distance of {} km".format(dist)) dist = dist * 1000.0 # convert to m keepCellMask = np.copy(cullCell[:]) @@ -118,7 +123,7 @@ # mark the cells with ice first keepCellMask[thickness > 0.0] = 1 - print 'Num of cells with ice:', sum(keepCellMask) + print('Num of cells with ice: {}'.format(sum(keepCellMask))) # find list of margin cells iceCells = np.nonzero(keepCellMask == 1)[0] @@ -138,7 +143,7 @@ ind = np.nonzero(((xCell-xCell[iCell])**2 + (yCell-yCell[iCell])**2)**0.5 < dist)[0] keepCellMask[ind] = 1 - print ' Num of cells to keep:', sum(keepCellMask) + print(' Num of cells to keep:'.format(sum(keepCellMask))) # Now convert the keepCellMask to the cullMask cullCell[:] = np.absolute(keepCellMask[:]-1) # Flip the mask for which ones to cull @@ -147,15 +152,22 @@ # ========= # cut out beyond some radius (good for the dome) elif maskmethod == 'radius': - print "Method: remove cells beyond a radius" - ind = np.nonzero( (xCell[:]**2 + yCell[:]**2)**0.5 > 26000.0 ) + dist=float(options.distance) + print("Method: remove cells beyond a radius of {} km from center of mesh".format(dist)) + xc = (xCell.max()-xCell.min())/2.0 + xCell.min() + yc = (yCell.max()-yCell.min())/2.0 + yCell.min() + ind = np.nonzero( ( (xCell[:]-xc)**2 + (yCell[:]-yc)**2)**0.5 > dist*1000.0 ) cullCell[ind] = 1 # ========= # cut off some fraction of the height/width on all 4 sides - useful for cleaning up a mesh from periodic_general elif maskmethod == 'edgeFraction': - print "Method: remove a fraction from all 4 edges" - frac=0.025 + frac=float(options.distance) + print("Method: remove a fraction from all 4 edges of {}".format(frac)) + if frac>=0.5: + sys.exit("ERROR: fraction cannot be >=0.5.") + if frac<0.0: + sys.exit("ERROR: fraction cannot be <0.") cullCell[:] = 0 width = xCell.max()-xCell.min() @@ -175,7 +187,7 @@ # ========= -print 'Num of cells to cull:', sum(cullCell[:]) +print('Num of cells to cull: {}'.format(sum(cullCell[:]))) # ========= # Try to add the new variable @@ -205,6 +217,6 @@ plt.show() f.close() -print "cullMask generation complete." +print("cullMask generation complete.") diff --git a/grid_gen/landice_grid_tools/interpolate_to_mpasli_grid.py b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py similarity index 65% rename from grid_gen/landice_grid_tools/interpolate_to_mpasli_grid.py rename to landice/mesh_tools_li/interpolate_to_mpasli_grid.py index a3eb9997d..08d9c1159 100755 --- a/grid_gen/landice_grid_tools/interpolate_to_mpasli_grid.py +++ b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py @@ -12,6 +12,9 @@ For MPAS input files only barycentric interpolation is supported. 
''' +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import sys import numpy as np import netCDF4 @@ -23,12 +26,12 @@ from datetime import datetime -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)\n" +print("== Gathering information. (Invoke with --help for more details. All arguments are optional)\n") parser = OptionParser() parser.description = __doc__ parser.add_option("-s", "--source", dest="inputFile", help="name of source (input) file. Can be either CISM format or MPASLI format.", default="cism.nc", metavar="FILENAME") parser.add_option("-d", "--destination", dest="mpasFile", help="name of destination file on which to interpolate fields. This needs to be MPASLI format with desired fields already existing.", default="landice_grid.nc", metavar="FILENAME") -parser.add_option("-m", "--method", dest="interpType", help="interpolation method to use. b=bilinear, d=barycentric, e=ESMF", default="b", metavar="METHOD") +parser.add_option("-m", "--method", dest="interpType", help="interpolation method to use. b=bilinear, d=barycentric, e=ESMF, n=nearest neighbor", default="b", metavar="METHOD") parser.add_option("-w", "--weight", dest="weightFile", help="ESMF weight file to input. Only used by ESMF interpolation method", metavar="FILENAME") parser.add_option("-t", "--thickness-only", dest="thicknessOnly", action="store_true", default=False, help="Only interpolate thickness and ignore all other variables (useful for setting up a cullMask)") for option in parser.option_list: @@ -36,14 +39,14 @@ option.help += (" " if option.help else "") + "[default: %default]" options, args = parser.parse_args() -print " Source file: " + options.inputFile -print " Destination MPASLI file to be modified: " + options.mpasFile +print(" Source file: {}".format(options.inputFile)) +print(" Destination MPASLI file to be modified: {}".format(options.mpasFile)) -print " Interpolation method to be used: " + options.interpType -print " (b=bilinear, d=barycentric, e=esmf)" +print(" Interpolation method to be used: {}".format(options.interpType)) +print(" (b=bilinear, d=barycentric, e=esmf)") if options.weightFile and options.interpType == 'e': - print " Interpolation will be performed using ESMF-weights method, where possible, using weights file: " + options.weightFile + print(" Interpolation will be performed using ESMF-weights method, where possible, using weights file: {}".format(options.weightFile)) #---------------------------- # Get weights from file wfile = netCDF4.Dataset(options.weightFile, 'r') @@ -53,7 +56,7 @@ wfile.close() #---------------------------- -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output #---------------------------- @@ -109,11 +112,11 @@ def BilinearInterp(Value, gridType): ygrid = len(y) - 2 elif ygrid < 0: ygrid = 0 - #print xgrid, ygrid, i + #print(xgrid, ygrid, i) ValueCell[i] = Value[ygrid,xgrid] * (x[xgrid+1] - xCell[i]) * (y[ygrid+1] - yCell[i]) / (dx * dy) + \ Value[ygrid+1,xgrid] * (x[xgrid+1] - xCell[i]) * (yCell[i] - y[ygrid]) / (dx * dy) + \ Value[ygrid,xgrid+1] * (xCell[i] - x[xgrid]) * (y[ygrid+1] - yCell[i]) / (dx * dy) + \ - Value[ygrid+1,xgrid+1] * (xCell[i] - x[xgrid]) * (yCell[i] - y[ygrid]) / (dx * dy) + Value[ygrid+1,xgrid+1] * (xCell[i] - x[xgrid]) * (yCell[i] - y[ygrid]) / (dx * dy) return ValueCell #---------------------------- @@ -124,31 +127,31 @@ def delaunay_interp_weights(xy, uv, d=2): uv = output (MPSALI) x,y 
coords ''' - #print "scipy version=", scipy.version.full_version + #print("scipy version=", scipy.version.full_version) if xy.shape[0] > 2**24-1: - print "WARNING: The source file contains more than 2^24-1 (16,777,215) points due to a limitation in older versions of Qhull (see: https://mail.scipy.org/pipermail/scipy-user/2015-June/036598.html). Delaunay creation may fail if Qhull being linked by scipy.spatial is older than v2015.0.1 2015/8/31." + print("WARNING: The source file contains more than 2^24-1 (16,777,215) points due to a limitation in older versions of Qhull (see: https://mail.scipy.org/pipermail/scipy-user/2015-June/036598.html). Delaunay creation may fail if Qhull being linked by scipy.spatial is older than v2015.0.1 2015/8/31.") tri = scipy.spatial.Delaunay(xy) - print " Delaunay triangulation complete." + print(" Delaunay triangulation complete.") simplex = tri.find_simplex(uv) - print " find_simplex complete." + print(" find_simplex complete.") vertices = np.take(tri.simplices, simplex, axis=0) - print " identified vertices." + print(" identified vertices.") temp = np.take(tri.transform, simplex, axis=0) - print " np.take complete." + print(" np.take complete.") delta = uv - temp[:, d] bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta) - print " calculating bary complete." + print(" calculating bary complete.") wts = np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True))) # Now figure out if there is any extrapolation. # Find indices to points of output file that are outside of convex hull of input points outsideInd = np.nonzero(tri.find_simplex(uv)<0) outsideCoords = uv[outsideInd] - #print outsideInd + #print(outsideInd) nExtrap = len(outsideInd[0]) if nExtrap > 0: - print " Found {} points requiring extrapolation. Using nearest neighbor extrapolation for those.".format(nExtrap) + print(" Found {} points requiring extrapolation. Using nearest neighbor extrapolation for those.".format(nExtrap)) # Now find nearest neighbor for each outside point # Use KDTree of input points @@ -158,6 +161,18 @@ def delaunay_interp_weights(xy, uv, d=2): #---------------------------- +def nn_interp_weights(xy, uv, d=2): + ''' + xy = input x,y coords + uv = output (MPSALI) x,y coords + Note: could separate out building tree and interpolation for efficiency if many fields need to be processed + ''' + tree = scipy.spatial.cKDTree(xy) + dist,idx = tree.query(uv, k=1) # k is the number of nearest neighbors. +# outfield = values.flatten()[idx] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. + return idx +#---------------------------- + def delaunay_interpolate(values, gridType): if gridType == 'x0': vtx = vtx0; wts = wts0 @@ -208,29 +223,37 @@ def interpolate_field(MPASfieldName): else: InputField = inputFile.variables[InputFieldName][:] - print ' Input field %s min/max:'%InputFieldName, InputField.min(), InputField.max() + print(' Input field {} min/max: {} {}'.format(InputFieldName, InputField.min(), InputField.max())) # Call the appropriate routine for actually doing the interpolation if options.interpType == 'b': - print " ...Interpolating to %s using built-in bilinear method..." 
% MPASfieldName + print(" ...Interpolating to {} using built-in bilinear method...".format(MPASfieldName)) MPASfield = BilinearInterp(InputField, fieldInfo[MPASfieldName]['gridType']) elif options.interpType == 'd': - print " ...Interpolating to %s using barycentric method..." % MPASfieldName + print(" ...Interpolating to {} using barycentric method...".format(MPASfieldName)) MPASfield = delaunay_interpolate(InputField, fieldInfo[MPASfieldName]['gridType']) + elif options.interpType == 'n': + print(" ...Interpolating to {} using nearest neighbor method...".format(MPASfieldName)) + if fieldInfo[MPASfieldName]['gridType'] == 'x0': + MPASfield = InputField.flatten()[nn_idx_x0] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. + elif fieldInfo[MPASfieldName]['gridType'] == 'x1': + MPASfield = InputField.flatten()[nn_idx_x1] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. + elif fieldInfo[MPASfieldName]['gridType'] == 'cell': + MPASfield = InputField.flatten()[nn_idx_cell] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. elif options.interpType == 'e': - print " ...Interpolating to %s using ESMF-weights method..." % MPASfieldName + print(" ...Interpolating to {} using ESMF-weights method...".format(MPASfieldName)) MPASfield = ESMF_interp(InputField) else: sys.exit('ERROR: Unknown interpolation method specified') - print ' interpolated MPAS %s min/max:'%MPASfieldName, MPASfield.min(), MPASfield.max() + print(' interpolated MPAS {} min/max: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) if fieldInfo[MPASfieldName]['scalefactor'] != 1.0: MPASfield *= fieldInfo[MPASfieldName]['scalefactor'] - print ' scaled MPAS %s min/max:'%MPASfieldName, MPASfield.min(), MPASfield.max() + print(' scaled MPAS {} min/max: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) if fieldInfo[MPASfieldName]['offset'] != 0.0: MPASfield += fieldInfo[MPASfieldName]['offset'] - print ' offset MPAS %s min/max:'%MPASfieldName, MPASfield.min(), MPASfield.max() + print(' offset MPAS {} min/max: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) return MPASfield @@ -272,58 +295,66 @@ def interpolate_field_with_layers(MPASfieldName): for z in range(inputVerticalDimSize): if filetype=='cism': - print ' Input layer %s, layer %s min/max:'%(z,InputFieldName), InputField[z,:,:].min(), InputField[z,:,:].max() + print(' Input layer {}, layer {} min/max: {} {}'.format(z, InputFieldName, InputField[z,:,:].min(), InputField[z,:,:].max())) elif filetype=='mpas': - print ' Input layer %s, layer %s min/max:'%(z,InputFieldName), InputField[:,z].min(), InputField[z,:].max() + print(' Input layer {}, layer {} min/max: {} {}'.format(z, InputFieldName, InputField[:,z].min(), InputField[z,:].max())) # Call the appropriate routine for actually doing the interpolation if options.interpType == 'b': - print " ...Layer %s, Interpolating this layer to MPAS grid using built-in bilinear method..." 
% (z) + print(" ...Layer {}, Interpolating this layer to MPAS grid using built-in bilinear method...".format(z)) mpas_grid_input_layers[z,:] = BilinearInterp(InputField[z,:,:], fieldInfo[MPASfieldName]['gridType']) elif options.interpType == 'd': - print " ...Layer %s, Interpolating this layer to MPAS grid using built-in barycentric method..." % (z) + print(" ...Layer {}, Interpolating this layer to MPAS grid using built-in barycentric method...".format(z)) if filetype=='cism': mpas_grid_input_layers[z,:] = delaunay_interpolate(InputField[z,:,:], fieldInfo[MPASfieldName]['gridType']) elif filetype=='mpas': mpas_grid_input_layers[z,:] = delaunay_interpolate(InputField[:,z], fieldInfo[MPASfieldName]['gridType']) + elif options.interpType == 'n': + print(" ...Layer {}, Interpolating this layer to MPAS grid using nearest neighbor method...".format(z)) + if fieldInfo[MPASfieldName]['gridType'] == 'x0': + mpas_grid_input_layers[z,:] = InputField[z,:,:].flatten()[nn_idx_x0] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. + elif fieldInfo[MPASfieldName]['gridType'] == 'x1': + mpas_grid_input_layers[z,:] = InputField[z,:,:].flatten()[nn_idx_x1] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. + elif fieldInfo[MPASfieldName]['gridType'] == 'cell': + mpas_grid_input_layers[z,:] = InputField[:,z].flatten()[nn_idx_cell] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. elif options.interpType == 'e': - print " ...Layer %s, Interpolating this layer to MPAS grid using ESMF-weights method..." 
% (z) + print(" ...Layer{}, Interpolating this layer to MPAS grid using ESMF-weights method...".format(z)) mpas_grid_input_layers[z,:] = ESMF_interp(InputField[z,:,:]) else: sys.exit('ERROR: Unknown interpolation method specified') - print ' interpolated MPAS %s, layer %s min/max:'%(MPASfieldName, z), mpas_grid_input_layers[z,:].min(), mpas_grid_input_layers[z,:].max() + print(' interpolated MPAS {}, layer {} min/max {} {}: '.format(MPASfieldName, z, mpas_grid_input_layers[z,:].min(), mpas_grid_input_layers[z,:].max())) if fieldInfo[MPASfieldName]['scalefactor'] != 1.0: mpas_grid_input_layers *= fieldInfo[MPASfieldName]['scalefactor'] - print ' scaled MPAS %s on CISM vertical layers, min/max:'%MPASfieldName, mpas_grid_input_layers.min(), mpas_grid_input_layers.max() + print(' scaled MPAS {} on CISM vertical layers, min/max: {} {}'.format(MPASfieldName, mpas_grid_input_layers.min(), mpas_grid_input_layers.max())) if fieldInfo[MPASfieldName]['offset'] != 0.0: mpas_grid_input_layers += fieldInfo[MPASfieldName]['offset'] - print ' offset MPAS %s on CISM vertical layers, min/max:'%MPASfieldName, mpas_grid_input_layers.min(), mpas_grid_input_layers.max() + print(' offset MPAS {} on CISM vertical layers, min/max: {} {}'.format(MPASfieldName, mpas_grid_input_layers.min(), mpas_grid_input_layers.max())) # ------------ # Now interpolate vertically - print " Input layer field {} has layers: {}".format(inputFile.variables[InputFieldName].dimensions[1], input_layers) - print " MPAS layer centers are: {}".format(mpasLayerCenters) + print(" Input layer field {} has layers: {}".format(inputFile.variables[InputFieldName].dimensions[1], input_layers)) + print(" MPAS layer centers are: {}".format(mpasLayerCenters)) if input_layers.min() > mpasLayerCenters.min(): # This fix ensures that interpolation is done when input_layers.min is very slightly greater than mpasLayerCenters.min if input_layers.min() - 1.0e-6 < mpasLayerCenters.min(): - print 'input_layers.min =', '{0:.16f}'.format(input_layers.min()) - print 'mpasLayerCenters.min =', '{0:.16f}'.format(mpasLayerCenters.min()) + print('input_layers.min = {0:.16f}'.format(input_layers.min())) + print('mpasLayerCenters.min = {0:.16f}'.format(mpasLayerCenters.min())) input_layers[0] = input_layers[0] - 1.0e-6 - print 'New input_layers.min =', '{0:.16f}'.format(input_layers.min()) + print('New input_layers.min = {0:.16f}'.format(input_layers.min())) else: - print "WARNING: input_layers.min() > mpasLayerCenters.min() Values at the first level of input_layers will be used for all MPAS layers in this region!" 
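# Illustrative sketch of the nearest-neighbor option added in this file: the
# nn_interp_weights() helper builds a scipy cKDTree on the source points and queries
# it once per destination point, and the returned indices are then used as a simple
# gather. The coordinates and values below are hypothetical.
import numpy as np
import scipy.spatial

src_xy = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])   # source (e.g. CISM) points
dst_xy = np.array([[0.9, 0.1], [0.1, 0.8]])                # destination (MPAS) cell centers
src_values = np.array([10.0, 20.0, 30.0])

tree = scipy.spatial.cKDTree(src_xy)
_, idx = tree.query(dst_xy, k=1)      # index of the nearest source point for each cell
print(src_values[idx])                # -> [20. 30.]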
+ print("WARNING: input_layers.min() > mpasLayerCenters.min() Values at the first level of input_layers will be used for all MPAS layers in this region!") if input_layers.max() < mpasLayerCenters.max(): # This fix ensures that interpolation is done when input_layers.max is very slightly smaller than mpasLayerCenters.max if input_layers.max() + 1.0e-6 > mpasLayerCenters.min(): - print 'input_layers.max =', '{0:.16f}'.format(input_layers.max()) - print 'mpasLayerCenters.max =', '{0:.16f}'.format(mpasLayerCenters.max()) + print('input_layers.max = {0:.16f}'.format(input_layers.max())) + print('mpasLayerCenters.max = {0:.16f}'.format(mpasLayerCenters.max())) input_layers[inputVerticalDimSize-1] = input_layers[inputVerticalDimSize-1] + 1.0e-6 - print 'New input_layers.max =', '{0:.16f}'.format(input_layers.max()) - print 'input_layers = {}'.format(input_layers) + print('New input_layers.max = {0:.16f}'.format(input_layers.max())) + print('input_layers = {}'.format(input_layers)) else: - print "WARNING: input_layers.max() < mpasLayerCenters.max() Values at the last level of input_layers will be used for all MPAS layers in this region!" + print("WARNING: input_layers.max() < mpasLayerCenters.max() Values at the last level of input_layers will be used for all MPAS layers in this region!") MPASfield = vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers) - print ' MPAS %s on MPAS vertical layers, min/max of all layers:'%MPASfieldName, MPASfield.min(), MPASfield.max() + print(' MPAS {} on MPAS vertical layers, min/max of all layers:'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) del mpas_grid_input_layers @@ -345,17 +376,18 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): -print "==================" -print 'Gathering coordinate information from input and output files.' +print("==================") +print('Gathering coordinate information from input and output files.') # Open the output file, get needed dimensions & variables try: MPASfile = netCDF4.Dataset(options.mpasFile,'r+') + MPASfile.set_auto_mask(False) try: nVertLevels = len(MPASfile.dimensions['nVertLevels']) except: - print 'Output file is missing the dimension nVertLevels. Might not be a problem.' + print('Output file is missing the dimension nVertLevels. Might not be a problem.') try: # 1d vertical fields @@ -365,25 +397,26 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): mpasLayerCenters[0] = 0.5 * layerThicknessFractions[0] for k in range(nVertLevels)[1:]: # skip the first level mpasLayerCenters[k] = mpasLayerCenters[k-1] + 0.5 * layerThicknessFractions[k-1] + 0.5 * layerThicknessFractions[k] - print " Using MPAS layer centers at sigma levels: {}".format(mpasLayerCenters) + print(" Using MPAS layer centers at sigma levels: {}".format(mpasLayerCenters)) except: - print 'Output file is missing the variable layerThicknessFractions. Might not be a problem.' + print('Output file is missing the variable layerThicknessFractions. 
Might not be a problem.') # '2d' spatial fields on cell centers xCell = MPASfile.variables['xCell'][:] - #print 'xCell min/max:', xCell.min(), xCell.max() + #print('xCell min/max:', xCell.min(), xCell.max() yCell = MPASfile.variables['yCell'][:] - #print 'yCell min/max:', yCell.min(), yCell.max() + #print('yCell min/max:', yCell.min(), yCell.max() nCells = len(MPASfile.dimensions['nCells']) except: sys.exit('Error: The output grid file specified is either missing or lacking needed dimensions/variables.') -print "==================\n" +print("==================\n") # Open the input file, get needed dimensions inputFile = netCDF4.Dataset(options.inputFile,'r') +inputFile.set_auto_mask(False) # Figure out if this is CISM or MPAS if 'x1' in inputFile.variables: @@ -398,48 +431,48 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): try: level = len(inputFile.dimensions['level']) except: - print ' Input file is missing the dimension level. Might not be a problem.' + print(' Input file is missing the dimension level. Might not be a problem.') try: stagwbndlevel = len(inputFile.dimensions['stagwbndlevel']) except: - print ' Input file is missing the dimension stagwbndlevel. Might not be a problem.' + print(' Input file is missing the dimension stagwbndlevel. Might not be a problem.') # Get CISM location variables if they exist try: x1 = inputFile.variables['x1'][:] dx1 = x1[1] - x1[0] - #print 'x1 min/max/dx:', x1.min(), x1.max(), dx1 + #print('x1 min/max/dx:', x1.min(), x1.max(), dx1 y1 = inputFile.variables['y1'][:] dy1 = y1[1] - y1[0] - #print 'y1 min/max/dx:', y1.min(), y1.max(), dy1 + #print('y1 min/max/dx:', y1.min(), y1.max(), dy1 ##x1 = x1 - (x1.max()-x1.min())/2.0 # This was for some shifted CISM grid but should not be used in general. ##y1 = y1 - (y1.max()-y1.min())/2.0 except: - print ' Input file is missing x1 and/or y1. Might not be a problem.' + print(' Input file is missing x1 and/or y1. Might not be a problem.') try: x0 = inputFile.variables['x0'][:] - #print 'x0 min/max:', x0.min(), x0.max() + #print('x0 min/max:', x0.min(), x0.max() y0 = inputFile.variables['y0'][:] - #print 'y0 min/max:', y0.min(), y0.max() + #print('y0 min/max:', y0.min(), y0.max() ##x0 = x0 - (x0.max()-x0.min())/2.0 ##y0 = y0 - (y0.max()-y0.min())/2.0 except: - print ' Input file is missing x0 and/or y0. Might not be a problem.' + print(' Input file is missing x0 and/or y0. Might not be a problem.') # Check the overlap of the grids - print '==================' - print 'CISM Input File extents:' - print ' x1 min, max: ', x1.min(), x1.max() - print ' y1 min, max: ', y1.min(), y1.max() - print 'MPAS File extents:' - print ' xCell min, max: ', xCell.min(), xCell.max() - print ' yCell min, max: ', yCell.min(), yCell.max() - print '==================' + print('==================') + print('CISM Input File extents:') + print(' x1 min, max: {} {}'.format(x1.min(), x1.max())) + print(' y1 min, max: {} {}'.format(y1.min(), y1.max())) + print('MPAS File extents:') + print(' xCell min, max: {} {}'.format(xCell.min(), xCell.max())) + print(' yCell min, max: {} {}'.format(yCell.min(), yCell.max())) + print('==================') elif filetype == 'mpas': @@ -447,12 +480,12 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): try: nVertLevels = len(inputFile.dimensions['nVertLevels']) except: - print ' Input file is missing the dimension nVertLevels. Might not be a problem.' + print(' Input file is missing the dimension nVertLevels. 
Might not be a problem.') #try: # nVertInterfaces = len(inputFile.dimensions['nVertInterfaces']) #except: - # print ' Input file is missing the dimension nVertInterfaces. Might not be a problem.' + # print(' Input file is missing the dimension nVertInterfaces. Might not be a problem.' # Get MPAS location variables if they exist try: @@ -463,14 +496,14 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): # Check the overlap of the grids - print '==================' - print 'Input MPAS File extents:' - print ' xCell min, max: ', inputxCell.min(), inputxCell.max() - print ' yCell min, max: ', inputyCell.min(), inputyCell.max() - print 'Output MPAS File extents:' - print ' xCell min, max: ', xCell.min(), xCell.max() - print ' yCell min, max: ', yCell.min(), yCell.max() - print '==================' + print('==================') + print('Input MPAS File extents:') + print(' xCell min, max: {} {}'.format(inputxCell.min(), inputxCell.max())) + print(' yCell min, max: {} {}'.format(inputyCell.min(), inputyCell.max())) + print('Output MPAS File extents:') + print(' xCell min, max: {} {}'.format(xCell.min(), xCell.max())) + print(' yCell min, max: {} {}'.format(yCell.min(), yCell.max())) + print('==================') if filetype=='mpas' and not options.interpType == 'd': @@ -487,12 +520,12 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): cismXY1[:,0] = Yi.flatten() cismXY1[:,1] = Xi.flatten() - print '\nBuilding interpolation weights: CISM x1/y1 -> MPAS' + print('\nBuilding interpolation weights: CISM x1/y1 -> MPAS') start = time.clock() vtx1, wts1, outsideIndx1, treex1 = delaunay_interp_weights(cismXY1, mpasXY) if len(outsideIndx1) > 0: outsideIndx1 = outsideIndx1[0] # get the list itself - end = time.clock(); print 'done in ', end-start + end = time.clock(); print('done in {}'.format(end-start)) if 'x0' in inputFile.variables and not options.thicknessOnly: # Need to setup separate weights for this grid @@ -501,19 +534,55 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): cismXY0[:,0] = Yi.flatten() cismXY0[:,1] = Xi.flatten() - print 'Building interpolation weights: CISM x0/y0 -> MPAS' + print('Building interpolation weights: CISM x0/y0 -> MPAS') start = time.clock() vtx0, wts0, outsideIndx0, treex0 = delaunay_interp_weights(cismXY0, mpasXY) if len(outsideIndx0) > 0: outsideIndx0 = outsideIndx0[0] # get the list itself - end = time.clock(); print 'done in ', end-start + end = time.clock(); print('done in {}'.format(end-start)) elif filetype=='mpas': inputmpasXY= np.vstack((inputxCell[:], inputyCell[:])).transpose() - print 'Building interpolation weights: MPAS in -> MPAS out' + print('Building interpolation weights: MPAS in -> MPAS out') start = time.clock() vtCell, wtsCell, outsideIndcell, treecell = delaunay_interp_weights(inputmpasXY, mpasXY) - end = time.clock(); print 'done in ', end-start + end = time.clock(); print('done in {}'.format(end-start)) + +#---------------------------- +# Setup NN interpolation weights if needed +if options.interpType == 'n': + mpasXY = np.vstack((xCell[:], yCell[:])).transpose() + + if filetype=='cism': + [Yi,Xi] = np.meshgrid(x1[:], y1[:]) + cismXY1 = np.zeros([Xi.shape[0]*Xi.shape[1],2]) + cismXY1[:,0] = Yi.flatten() + cismXY1[:,1] = Xi.flatten() + + print('\nBuilding interpolation weights: CISM x1/y1 -> MPAS') + start = time.clock() + nn_idx_x1 = nn_interp_weights(cismXY1, mpasXY) + end = time.clock(); print('done in {}'.format(end-start)) + + if 'x0' in inputFile.variables and not 
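
delaunay_interp_weights and nn_interp_weights are helpers defined earlier in this script and do not appear in this hunk. As a rough illustration only, weights of this kind are often precomputed with SciPy as sketched below; the barycentric-weight recipe, the handling of destination points outside the source hull, and the KD-tree nearest-neighbor lookup are assumptions made for the sketch, not the script's actual code.

import numpy as np
from scipy.spatial import Delaunay, cKDTree

def delaunay_weights_sketch(src_xy, dst_xy):
    # Precompute vertex indices and barycentric weights for linear interpolation
    # from scattered source points to destination points.
    tri = Delaunay(src_xy)
    simplex = tri.find_simplex(dst_xy)     # -1 where a destination point is outside the hull
    vertices = tri.simplices[simplex]      # (nDst, 3) indices into src_xy
    temp = tri.transform[simplex]
    bary = np.einsum('njk,nk->nj', temp[:, :2], dst_xy - temp[:, 2])
    weights = np.c_[bary, 1.0 - bary.sum(axis=1)]
    outside = np.nonzero(simplex == -1)    # analogous to the outsideIndx arrays above
    return vertices, weights, outside

def nn_weights_sketch(src_xy, dst_xy):
    # Nearest-neighbor "weights" reduce to the index of the closest source point.
    _, idx = cKDTree(src_xy).query(dst_xy)
    return idx

# Applying precomputed Delaunay weights to a field defined on the source points:
# field_on_dst = np.einsum('nj,nj->n', field_on_src[vertices], weights)
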
options.thicknessOnly: + # Need to setup separate weights for this grid + [Yi,Xi] = np.meshgrid(x0[:], y0[:]) + cismXY0 = np.zeros([Xi.shape[0]*Xi.shape[1],2]) + cismXY0[:,0] = Yi.flatten() + cismXY0[:,1] = Xi.flatten() + + print('Building interpolation weights: CISM x0/y0 -> MPAS') + start = time.clock() + nn_idx_x0 = nn_interp_weights(cismXY0, mpasXY) + end = time.clock(); print('done in {}'.format(end-start)) + + elif filetype=='mpas': + inputmpasXY= np.vstack((inputxCell[:], inputyCell[:])).transpose() + print('Building interpolation weights: MPAS in -> MPAS out') + start = time.clock() + nn_idx_cell = nn_interp_weights(inputmpasXY, mpasXY) + end = time.clock(); print('done in {}'.format(end-start)) + #---------------------------- # Map Input-Output field names - add new fields here as needed @@ -525,7 +594,7 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): if not options.thicknessOnly: fieldInfo['bedTopography'] = {'InputName':'topg', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} fieldInfo['sfcMassBal'] = {'InputName':'acab', 'scalefactor':910.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} # Assuming default CISM density - fieldInfo['floatingBasalMassBal'] = {'InputName':'bmb', 'scalefactor':910.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} # Assuming default CISM density + fieldInfo['floatingBasalMassBal'] = {'InputName':'subm', 'scalefactor':910.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} # Assuming default CISM density #fieldInfo['temperature'] = {'InputName':'temp', 'scalefactor':1.0, 'offset':273.15, 'gridType':'x1', 'vertDim':True} fieldInfo['temperature'] = {'InputName':'tempstag', 'scalefactor':1.0, 'offset':273.15, 'gridType':'x1', 'vertDim':True} # pick one or the other fieldInfo['basalHeatFlux'] = {'InputName':'bheatflx', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} @@ -535,10 +604,13 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): # fields for observed surface speed and associated error, observed thickness change fieldInfo['observedSurfaceVelocityX'] = {'InputName':'vx', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} fieldInfo['observedSurfaceVelocityY'] = {'InputName':'vy', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['observedSurfaceVelocityUncertainty'] = {'InputName':'verr', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} + fieldInfo['observedSurfaceVelocityUncertainty'] = {'InputName':'vErr', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} fieldInfo['observedThicknessTendency'] = {'InputName':'dHdt', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} fieldInfo['observedThicknessTendencyUncertainty'] = {'InputName':'dHdtErr', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['thicknessUncertainty'] = {'InputName':'thkerr', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} + fieldInfo['thicknessUncertainty'] = {'InputName':'thkErr', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} + + fieldInfo['ismip6shelfMelt_basin'] = {'InputName':'ismip6shelfMelt_basin', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} + fieldInfo['ismip6shelfMelt_deltaT'] = {'InputName':'ismip6shelfMelt_deltaT', 'scalefactor':1.0, 
'offset':0.0, 'gridType':'x1', 'vertDim':False} elif filetype=='mpas': @@ -561,18 +633,18 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): #---------------------------- - + #---------------------------- # try each field. If it exists in the input file, it will be copied. If not, it will be skipped. for MPASfieldName in fieldInfo: - print '\n## %s ##'%MPASfieldName + print('\n## {} ##'.format(MPASfieldName)) if not MPASfieldName in MPASfile.variables: - print " Warning: Field '{}' is not in the destination file. Skipping.".format(MPASfieldName) + print(" Warning: Field '{}' is not in the destination file. Skipping.".format(MPASfieldName)) continue # skip the rest of this iteration of the for loop over variables if not fieldInfo[MPASfieldName]['InputName'] in inputFile.variables: - print " Warning: Field '{}' is not in the source file. Skipping.".format(fieldInfo[MPASfieldName]['InputName']) + print(" Warning: Field '{}' is not in the source file. Skipping.".format(fieldInfo[MPASfieldName]['InputName'])) continue # skip the rest of this iteration of the for loop over variables start = time.clock() @@ -580,12 +652,12 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): MPASfield = interpolate_field_with_layers(MPASfieldName) else: MPASfield = interpolate_field(MPASfieldName) - end = time.clock(); print ' interpolation done in ', end-start + end = time.clock(); print(' interpolation done in {}'.format(end-start)) # Don't allow negative thickness. if MPASfieldName == 'thickness' and MPASfield.min() < 0.0: MPASfield[MPASfield < 0.0] = 0.0 - print ' removed negative thickness, new min/max:', MPASfield.min(), MPASfield.max() + print(' removed negative thickness, new min/max: {} {}'.format(MPASfield.min(), MPASfield.max())) # Now insert the MPAS field into the file. if 'Time' in MPASfile.variables[MPASfieldName].dimensions: @@ -607,4 +679,4 @@ def vertical_interp_MPAS_grid(mpas_grid_input_layers, input_layers): inputFile.close() MPASfile.close() -print '\nInterpolation completed.' +print('\nInterpolation completed.') diff --git a/grid_gen/landice_grid_tools/mark_domain_boundaries_dirichlet.py b/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py similarity index 80% rename from grid_gen/landice_grid_tools/mark_domain_boundaries_dirichlet.py rename to landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py index 3481b529b..bd826aabd 100755 --- a/grid_gen/landice_grid_tools/mark_domain_boundaries_dirichlet.py +++ b/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py @@ -3,13 +3,16 @@ This script marks all of the boundary cells in a domain as Dirichlet velocity boundaries. ''' +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import netCDF4 import numpy as np from optparse import OptionParser from datetime import datetime import sys -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)\n" +print("== Gathering information. (Invoke with --help for more details. 
All arguments are optional)\n") parser = OptionParser() parser.description = __doc__ parser.add_option("-f", "--file", dest="inputFile", help="name of file to be modified.", default="landice_grid.nc", metavar="FILENAME") @@ -20,8 +23,8 @@ options, args = parser.parse_args() -print " Input file: " + options.inputFile -print " Time level: {}".format(options.time) +print(" Input file: {}".format(options.inputFile)) +print(" Time level: {}".format(options.time)) f=netCDF4.Dataset(options.inputFile, 'r+') nCells = len(f.dimensions['nCells']) @@ -47,4 +50,4 @@ f.close() -print '\nMarking boundary cells completed.' +print('\nMarking boundary cells completed.') diff --git a/grid_gen/landice_grid_tools/README_grid_generation_workflow.txt b/landice/mesh_tools_li/misc/README_grid_generation_workflow.txt similarity index 100% rename from grid_gen/landice_grid_tools/README_grid_generation_workflow.txt rename to landice/mesh_tools_li/misc/README_grid_generation_workflow.txt diff --git a/grid_gen/landice_grid_tools/calibrate_beta.py b/landice/mesh_tools_li/misc/calibrate_beta.py similarity index 100% rename from grid_gen/landice_grid_tools/calibrate_beta.py rename to landice/mesh_tools_li/misc/calibrate_beta.py diff --git a/grid_gen/landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py b/landice/mesh_tools_li/misc/copy_etopo_to_MPAS_sphere_grid.py similarity index 100% rename from grid_gen/landice_grid_tools/copy_etopo_to_MPAS_sphere_grid.py rename to landice/mesh_tools_li/misc/copy_etopo_to_MPAS_sphere_grid.py diff --git a/grid_gen/landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh b/landice/mesh_tools_li/misc/mpas_mesh_to_landice_ic_batch.sh similarity index 100% rename from grid_gen/landice_grid_tools/mpas_mesh_to_landice_ic_batch.sh rename to landice/mesh_tools_li/misc/mpas_mesh_to_landice_ic_batch.sh diff --git a/grid_gen/landice_grid_tools/prepare_pattyn_temperature_field_for_interpolation.m b/landice/mesh_tools_li/misc/prepare_pattyn_temperature_field_for_interpolation.m similarity index 100% rename from grid_gen/landice_grid_tools/prepare_pattyn_temperature_field_for_interpolation.m rename to landice/mesh_tools_li/misc/prepare_pattyn_temperature_field_for_interpolation.m diff --git a/grid_gen/landice_grid_tools/convert_landice_bitmasks.py b/landice/output_processing_li/convert_landice_bitmasks.py similarity index 88% rename from grid_gen/landice_grid_tools/convert_landice_bitmasks.py rename to landice/output_processing_li/convert_landice_bitmasks.py index 0b9a5a9f0..b4ecec846 100755 --- a/grid_gen/landice_grid_tools/convert_landice_bitmasks.py +++ b/landice/output_processing_li/convert_landice_bitmasks.py @@ -3,6 +3,8 @@ Script to convert landice bit mask into individual masks for each bit and save them to the netcdf file. Converts any of cellMask, edgeMask, vertexMask present in file. ''' +from __future__ import absolute_import, division, print_function, unicode_literals + import numpy from netCDF4 import Dataset @@ -32,7 +34,7 @@ } -print "** Gathering information." 
+print("** Gathering information.") parser = OptionParser() parser.add_option("-f", "--filename", dest="filename", help="file to visualize; default: output.nc", default="output.nc", metavar="FILE") options, args = parser.parse_args() @@ -50,9 +52,9 @@ else: newMaskVar = inFile.createVariable(varName, 'i', ('Time','nCells')) for t in range(nTime): - newMaskVar[t,:] = (inFile.variables['cellMask'][t,:] & masks[maskName]) / masks[maskName] + newMaskVar[t,:] = (inFile.variables['cellMask'][t,:] & masks[maskName]) // masks[maskName] inFile.sync() - print "cellMask converted to individual masks." + print("cellMask converted to individual masks.") if 'edgeMask' in inFile.variables: for maskName in masks: @@ -62,9 +64,9 @@ else: newMaskVar = inFile.createVariable(varName, 'i', ('Time','nEdges')) for t in range(nTime): - newMaskVar[t,:] = (inFile.variables['edgeMask'][t,:] & masks[maskName]) / masks[maskName] + newMaskVar[t,:] = (inFile.variables['edgeMask'][t,:] & masks[maskName]) // masks[maskName] inFile.sync() - print "edgeMask converted to individual masks." + print("edgeMask converted to individual masks.") if 'vertexMask' in inFile.variables: for maskName in masks: @@ -74,9 +76,9 @@ else: newMaskVar = inFile.createVariable(varName, 'i', ('Time','nVertices')) for t in range(nTime): - newMaskVar[t,:] = (inFile.variables['vertexMask'][t,:] & masks[maskName]) / masks[maskName] + newMaskVar[t,:] = (inFile.variables['vertexMask'][t,:] & masks[maskName]) // masks[maskName] inFile.sync() - print "vertexMask converted to individual masks." + print("vertexMask converted to individual masks.") inFile.close() diff --git a/grid_gen/landice_grid_tools/plot_globalStats.py b/landice/output_processing_li/plot_globalStats.py similarity index 91% rename from grid_gen/landice_grid_tools/plot_globalStats.py rename to landice/output_processing_li/plot_globalStats.py index d69c83158..43110602f 100755 --- a/grid_gen/landice_grid_tools/plot_globalStats.py +++ b/landice/output_processing_li/plot_globalStats.py @@ -3,6 +3,8 @@ Script to plot common time-series from one or more landice globalStats files. ''' +from __future__ import absolute_import, division, print_function, unicode_literals + import sys import numpy as np import numpy.ma as ma @@ -13,7 +15,7 @@ rhoi = 910.0 -print "** Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("** Gathering information. (Invoke with --help for more details. 
All arguments are optional)") parser = OptionParser(description=__doc__) parser.add_option("-1", dest="file1inName", help="input filename", default="globalStats.nc", metavar="FILENAME") parser.add_option("-2", dest="file2inName", help="input filename", metavar="FILENAME") @@ -22,7 +24,7 @@ parser.add_option("-u", dest="units", help="units for mass/volume: m3, kg, Gt", default="m3", metavar="FILENAME") options, args = parser.parse_args() -print "Using ice density of {} kg/m3 if required for unit conversions".format(rhoi) +print("Using ice density of {} kg/m3 if required for unit conversions".format(rhoi)) # create axes to plot into fig = plt.figure(1, figsize=(9, 11), facecolor='w') @@ -40,7 +42,7 @@ massUnit = "Gt" else: sys.exit("Unknown mass/volume units") -print "Using volume/mass units of: ", massUnit +print("Using volume/mass units of: ", massUnit) axVol = fig.add_subplot(nrow, ncol, 1) plt.xlabel('Year') @@ -94,7 +96,7 @@ def plotStat(fname): - print "Reading and plotting file: " + fname + print("Reading and plotting file: {}".format(fname)) name = fname @@ -167,8 +169,7 @@ def plotStat(fname): axCalvFlux.legend(loc='best', prop={'size': 6}) -print "Generating plot." +print("Generating plot.") fig.tight_layout() plt.show() - diff --git a/grid_gen/landice_grid_tools/plot_mass_balance.py b/landice/output_processing_li/plot_mass_balance.py similarity index 85% rename from grid_gen/landice_grid_tools/plot_mass_balance.py rename to landice/output_processing_li/plot_mass_balance.py index e5fcc8c64..c7ee81773 100755 --- a/grid_gen/landice_grid_tools/plot_mass_balance.py +++ b/landice/output_processing_li/plot_mass_balance.py @@ -4,7 +4,8 @@ Currently only assesses grounded ice sheet mass balance. ''' -import sys +from __future__ import absolute_import, division, print_function, unicode_literals + import numpy as np from netCDF4 import Dataset from optparse import OptionParser @@ -13,16 +14,16 @@ rhoi = 910.0 -print "** Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("** Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser(description=__doc__) parser.add_option("-f", dest="fileName", help="input filename", default="globalStats.nc", metavar="FILENAME") options, args = parser.parse_args() -print "Using ice density of {} kg/m3 if required for unit conversions".format(rhoi) +print("Using ice density of {} kg/m3 if required for unit conversions".format(rhoi)) -print "Mass balance will be inaccurate if not writing stats on every timestep." +print("Mass balance will be inaccurate if not writing stats on every timestep.") -print "Reading and plotting file: " + options.fileName +print("Reading and plotting file: {}".format(options.fileName)) f = Dataset(options.fileName,'r') yr = f.variables['daysSinceStart'][:]/365.0 dyr = np.zeros(yr.shape) @@ -83,7 +84,7 @@ plt.legend(loc='best', prop={'size': 6}) plt.tight_layout() -print "Plotting complete." +print("Plotting complete.") plt.show() f.close() diff --git a/landice/output_processing_li/remove_output_file_time_loops.py b/landice/output_processing_li/remove_output_file_time_loops.py new file mode 100755 index 000000000..aebb269fe --- /dev/null +++ b/landice/output_processing_li/remove_output_file_time_loops.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python +''' +Script to remove repeated time entries in globalStats or other output file that occur +due to inexact restarts used in conjunction with the adpative timestepper. 
+Requires 'daysSinceStart' field is available. (Could be modified to use xtime instead) +''' + +from __future__ import absolute_import, division, print_function, unicode_literals + +import sys +from netCDF4 import Dataset +import numpy as np +import argparse + + +parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument("-f", "--file", dest="file", help="File to be cleaned.", metavar="FILE", default="globalStats.nc") +args = parser.parse_args() + +f = Dataset(args.file, 'r') +days = f.variables['daysSinceStart'][:] +nt = len(f.dimensions['Time']) + +keepInd = np.zeros((nt,)) +keepInd[:] = 1 # initialize to keep all days +prevMaxDay = days[0] +nLoops = 0 +for i in range(1,nt): + if days[i] < prevMaxDay: + # found a loop. + print("Found a time loop at index {}".format(i)) + nLoops += 1 + # We want the second instance of this time period, not the first + # So we need to "unkeep" all previous time slices before this one that have greater times + ind = np.where(days[:i] > days[i])[0] + keepInd[ind] = 0 + prevMaxDay = days[i] + +print("Found and repairing {} time loops. Keeping {} indices out of {}.".format(nLoops, int(keepInd.sum()), nt)) +keepList = np.nonzero(keepInd)[0] + +if int(keepInd.sum())==nt: + print("No cleaning required.") + sys.exit() + +# ----- continue processing if needed ----- + +# Copy all fields to a new file +fnameCleaned=args.file+".cleaned" +fileout = Dataset(fnameCleaned, 'w') + +for name in f.ncattrs(): + setattr(fileout, name, getattr(f, name) ) + print('Copied global attribute {} = {}'.format(name, getattr(f, name))) + +if hasattr(fileout, 'history'): + setattr(fileout, 'history', sys.argv[:] ) + +fileout.sync() + +print("---- Copying dimensions from input file to output file ----") +for dim in f.dimensions.keys(): + print(dim) + if dim == 'Time': + dimvalue = None # netCDF4 won't properly get this with the command below (you need to use the isunlimited method) + else: + dimvalue = len(f.dimensions[dim]) + fileout.createDimension(dim, dimvalue) +fileout.sync() + +print("---- Copying variables from input file to output file ----") +for varname in f.variables: + print(varname) + thevar = f.variables[varname] + newVar = fileout.createVariable(varname, thevar.dtype, thevar.dimensions) + if 'Time' in f.variables[varname].dimensions: + if 'Time' == f.variables[varname].dimensions[0]: + if len(f.variables[varname].dimensions) == 1: + newVar[:] = thevar[keepList] + else: + newVar[:] = thevar[keepList,:] + else: + sys.exit("Error: 'Time' is in dimension list for variable {}, but it is not the first dimension. Script needs improving to handle this case.".format(varname)) + else: + newVar[:] = thevar[:] +print("----") + +fileout.close() +f.close() + +print("Complete. 
Cleaned output written to {}".format(fnameCleaned)) diff --git a/grid_gen/create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py similarity index 100% rename from grid_gen/create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py rename to mesh_tools/create_SCRIP_files/create_SCRIP_file_from_CISM_mesh.py diff --git a/grid_gen/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py similarity index 61% rename from grid_gen/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py rename to mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py index 0dcd02d62..c2addb766 100755 --- a/grid_gen/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py +++ b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py @@ -9,31 +9,31 @@ from optparse import OptionParser -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("== Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser() parser.description = "This script takes an MPAS grid file and generates a SCRIP grid file." parser.add_option("-m", "--mpas", dest="mpasFile", help="MPAS grid file name used as input.", default="grid.nc", metavar="FILENAME") parser.add_option("-s", "--scrip", dest="scripFile", help="SCRIP grid file to output.", default="scrip.nc", metavar="FILENAME") parser.add_option("-l", "--landice", dest="landiceMasks", help="If flag is on, landice masks will be computed and used.", action="store_true") for option in parser.option_list: - if option.default != ("NO", "DEFAULT"): - option.help += (" " if option.help else "") + "[default: %default]" + if option.default != ("NO", "DEFAULT"): + option.help += (" " if option.help else "") + "[default: %default]" options, args = parser.parse_args() if not options.mpasFile: - sys.exit('Error: MPAS input grid file is required. Specify with -m command line argument.') + sys.exit('Error: MPAS input grid file is required. Specify with -m command line argument.') if not options.scripFile: - sys.exit('Error: SCRIP output grid file is required. Specify with -s command line argument.') + sys.exit('Error: SCRIP output grid file is required. 
Specify with -s command line argument.') if not options.landiceMasks: options.landiceMasks = False if options.landiceMasks: - print " -- Landice Masks are enabled" + print(" -- Landice Masks are enabled") else: - print " -- Landice Masks are disabled" + print(" -- Landice Masks are disabled") -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output # =============================================== @@ -52,6 +52,14 @@ maxVertices = len(fin.dimensions['maxEdges']) areaCell = fin.variables['areaCell'][:] sphereRadius = float(fin.sphere_radius) +on_a_sphere = str(fin.on_a_sphere) + + +if sphereRadius <= 0: + print(" -- WARNING: conservative remapping is NOT possible when 'sphereRadius' <= 0 because 'grid_area' field will be infinite (from division by 0)") + +if on_a_sphere == "NO": + print(" -- WARNING: 'on_a_sphere' attribute is 'NO', which means that there may be some disagreement regarding area between the planar (source) and spherical (target) mesh") if options.landiceMasks: landIceMask = fin.variables['landIceMask'][:] @@ -86,19 +94,19 @@ grid_corner_lon_local = np.zeros( (nCells, maxVertices) ) # It is WAYYY faster to fill in the array entry-by-entry in memory than to disk. grid_corner_lat_local = np.zeros( (nCells, maxVertices) ) for iCell in range(nCells): - vertexMax = nEdgesOnCell[iCell] - grid_corner_lat_local[iCell, 0:vertexMax] = latVertex[verticesOnCell[iCell, 0:vertexMax] - 1] - grid_corner_lon_local[iCell, 0:vertexMax] = lonVertex[verticesOnCell[iCell, 0:vertexMax] - 1] - if vertexMax < maxVertices: - # repeat the last vertex location for any remaining, unused vertex indices - grid_corner_lat_local[iCell, vertexMax:] = latVertex[verticesOnCell[iCell, vertexMax-1] - 1] - grid_corner_lon_local[iCell, vertexMax:] = lonVertex[verticesOnCell[iCell, vertexMax-1] - 1] + vertexMax = nEdgesOnCell[iCell] + grid_corner_lat_local[iCell, 0:vertexMax] = latVertex[verticesOnCell[iCell, 0:vertexMax] - 1] + grid_corner_lon_local[iCell, 0:vertexMax] = lonVertex[verticesOnCell[iCell, 0:vertexMax] - 1] + if vertexMax < maxVertices: + # repeat the last vertex location for any remaining, unused vertex indices + grid_corner_lat_local[iCell, vertexMax:] = latVertex[verticesOnCell[iCell, vertexMax-1] - 1] + grid_corner_lon_local[iCell, vertexMax:] = lonVertex[verticesOnCell[iCell, vertexMax-1] - 1] if options.landiceMasks: - # If landiceMasks are enabled, mask out ocean under landice. - grid_imask[iCell] = 1 - landIceMask[0, iCell] + # If landiceMasks are enabled, mask out ocean under landice. + grid_imask[iCell] = 1 - landIceMask[0, iCell] else: - grid_imask[iCell] = 1 # If landiceMasks are not enabled, don't mask anything out. + grid_imask[iCell] = 1 # If landiceMasks are not enabled, don't mask anything out. 
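
The corner-filling loop above pads any unused corner slots by repeating the cell's last real vertex, so every SCRIP cell still carries maxVertices corners (the extras form a degenerate edge) and the zero padding in verticesOnCell is never dereferenced. A small self-contained illustration of that padding rule, with made-up connectivity:

import numpy as np

# verticesOnCell is 1-based and zero-padded, as in MPAS mesh files (made-up values here).
latVertex = np.radians(np.array([10.0, 11.0, 12.0, 13.0, 14.0]))
verticesOnCell = np.array([[1, 2, 3, 0],     # a triangle with one unused slot
                           [2, 3, 4, 5]])    # a quad that fills every slot
nEdgesOnCell = np.array([3, 4])

grid_corner_lat = np.zeros(verticesOnCell.shape)
for iCell in range(verticesOnCell.shape[0]):
    n = nEdgesOnCell[iCell]
    grid_corner_lat[iCell, :n] = latVertex[verticesOnCell[iCell, :n] - 1]
    # Repeat the last real corner so leftover slots form a degenerate edge.
    grid_corner_lat[iCell, n:] = latVertex[verticesOnCell[iCell, n - 1] - 1]
print(np.degrees(grid_corner_lat))   # -> [[10. 11. 12. 12.] [11. 12. 13. 14.]]
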
grid_corner_lat[:] = grid_corner_lat_local[:] grid_corner_lon[:] = grid_corner_lon_local[:] @@ -114,13 +122,13 @@ #plt.show() -print "Input latCell min/max values (radians):", latCell[:].min(), latCell[:].max() -print "Input lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max() -print "Calculated grid_center_lat min/max values (radians):", grid_center_lat[:].min(), grid_center_lat[:].max() -print "Calculated grid_center_lon min/max values (radians):", grid_center_lon[:].min(), grid_center_lon[:].max() -print "Calculated grid_area min/max values (sq radians):", grid_area[:].min(), grid_area[:].max() +print("Input latCell min/max values (radians):", latCell[:].min(), latCell[:].max()) +print("Input lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max()) +print("Calculated grid_center_lat min/max values (radians):", grid_center_lat[:].min(), grid_center_lat[:].max()) +print("Calculated grid_center_lon min/max values (radians):", grid_center_lon[:].min(), grid_center_lon[:].max()) +print("Calculated grid_area min/max values (sq radians):", grid_area[:].min(), grid_area[:].max()) fin.close() fout.close() -print "Creation of SCRIP file is complete." +print("Creation of SCRIP file is complete.") diff --git a/grid_gen/grid_rotate/Makefile b/mesh_tools/grid_rotate/Makefile similarity index 100% rename from grid_gen/grid_rotate/Makefile rename to mesh_tools/grid_rotate/Makefile diff --git a/grid_gen/grid_rotate/README b/mesh_tools/grid_rotate/README similarity index 100% rename from grid_gen/grid_rotate/README rename to mesh_tools/grid_rotate/README diff --git a/grid_gen/grid_rotate/grid_rotate.f90 b/mesh_tools/grid_rotate/grid_rotate.f90 similarity index 99% rename from grid_gen/grid_rotate/grid_rotate.f90 rename to mesh_tools/grid_rotate/grid_rotate.f90 index a7e13b6ed..1f84b3a0c 100644 --- a/grid_gen/grid_rotate/grid_rotate.f90 +++ b/mesh_tools/grid_rotate/grid_rotate.f90 @@ -83,6 +83,7 @@ subroutine main() real (kind=RKIND) :: cx, cy, cz character(220) :: copyCmd + integer :: copyStat call read_namelist(original_latitude_degrees, original_longitude_degrees, new_latitude_degrees, new_longitude_degrees, birdseye_rotation_counter_clockwise_degrees) @@ -98,7 +99,10 @@ subroutine main() ! Copy original file to output file copyCmd = "cp " // trim(filename) // " " // trim(newFilename) - call system(copyCmd) + copyStat = system(copyCmd) + if(copyStat /= 0) then + return ! If `cp` fails, let it report its error and exit + end if ! 
Make sure the output file is writeable copyCmd = "chmod u+w " // trim(newFilename) diff --git a/grid_gen/grid_rotate/mesh.ncl b/mesh_tools/grid_rotate/mesh.ncl similarity index 100% rename from grid_gen/grid_rotate/mesh.ncl rename to mesh_tools/grid_rotate/mesh.ncl diff --git a/grid_gen/grid_rotate/namelist.input b/mesh_tools/grid_rotate/namelist.input similarity index 100% rename from grid_gen/grid_rotate/namelist.input rename to mesh_tools/grid_rotate/namelist.input diff --git a/mesh_tools/merge_split_meshes/merge_grids.py b/mesh_tools/merge_split_meshes/merge_grids.py new file mode 120000 index 000000000..da21c28fa --- /dev/null +++ b/mesh_tools/merge_split_meshes/merge_grids.py @@ -0,0 +1 @@ +../../conda_package/mpas_tools/merge_grids.py \ No newline at end of file diff --git a/mesh_tools/merge_split_meshes/split_grids.py b/mesh_tools/merge_split_meshes/split_grids.py new file mode 120000 index 000000000..50ac14e26 --- /dev/null +++ b/mesh_tools/merge_split_meshes/split_grids.py @@ -0,0 +1 @@ +../../conda_package/mpas_tools/split_grids.py \ No newline at end of file diff --git a/mesh_tools/mesh_conversion_tools/CMakeLists.txt b/mesh_tools/mesh_conversion_tools/CMakeLists.txt new file mode 100644 index 000000000..c9312a12b --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/CMakeLists.txt @@ -0,0 +1,19 @@ +cmake_minimum_required (VERSION 3.0.2) +project (mesh_conversion_tools) + +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") + +include_directories (netcdf-cxx-4.2 .) + +set(SOURCES netcdf_utils.cpp netcdf-cxx-4.2/ncvalues.cpp netcdf-cxx-4.2/netcdf.cpp) + +add_executable (MpasMeshConverter.x mpas_mesh_converter.cpp ${SOURCES}) +target_link_libraries (MpasMeshConverter.x netcdf) + +add_executable (MpasCellCuller.x mpas_cell_culler.cpp ${SOURCES}) +target_link_libraries (MpasCellCuller.x netcdf) + +add_executable (MpasMaskCreator.x mpas_mask_creator.cpp jsoncpp.cpp ${SOURCES}) +target_link_libraries (MpasMaskCreator.x netcdf) + +install (TARGETS MpasMeshConverter.x MpasCellCuller.x MpasMaskCreator.x DESTINATION bin) diff --git a/grid_gen/mesh_conversion_tools/Makefile b/mesh_tools/mesh_conversion_tools/Makefile similarity index 57% rename from grid_gen/mesh_conversion_tools/Makefile rename to mesh_tools/mesh_conversion_tools/Makefile index 3028c1e48..d7cc6486e 100644 --- a/grid_gen/mesh_conversion_tools/Makefile +++ b/mesh_tools/mesh_conversion_tools/Makefile @@ -12,7 +12,7 @@ # In this file: # comment gnu, uncomment intel flags # change to: -# CC=CC +# CXX=CC # may need to unload parallel NetCDF and HDF5 libraries # to avoid g++ conflicts: # module unload python @@ -23,12 +23,12 @@ # make # gnu -CC=g++ -CFLAGS= -O3 -std=c++0x -fopenmp -lstdc++ -DFLAGS= -g -std=c++0x -D_DEBUG -fopenmp -lstdc++ +CXX ?= g++ +CFLAGS ?= -O3 -std=c++0x -fopenmp -lstdc++ +DFLAGS ?= -g -std=c++0x -D_DEBUG -fopenmp -lstdc++ # intel -# CC=icpc +# CXX=icpc # CFLAGS= -O3 -std=c++0x -qopenmp -lstdc++ # DFLAGS= -g -std=c++0x -D_DEBUG -qopenmp -lstdc++ @@ -38,31 +38,34 @@ MASK_EXECUTABLE= MpasMaskCreator.x ifneq (${NETCDF}, ) ifneq ($(shell which ${NETCDF}/bin/nc-config 2> /dev/null), ) - LIBS = $(shell ${NETCDF}/bin/nc-config --libs) -lnetcdf_c++ + LIBS = $(shell ${NETCDF}/bin/nc-config --libs) INCS = $(shell ${NETCDF}/bin/nc-config --cflags) else LIBS= -L${NETCDF}/lib - LIBS += -lnetcdf_c++ -lnetcdf + LIBS += -lnetcdf INCS = -I${NETCDF}/include endif else ifneq ($(shell which nc-config 2> /dev/null), ) - LIBS = $(shell nc-config --libs) -lnetcdf_c++ + LIBS = $(shell nc-config --libs) INCS = $(shell nc-config 
--cflags) else LIBS= -L${NETCDF}/lib - LIBS += -lnetcdf_c++ -lnetcdf + LIBS += -lnetcdf INCS = -I${NETCDF}/include endif +INCS += -Inetcdf-cxx-4.2/ +SRC = netcdf_utils.cpp netcdf-cxx-4.2/ncvalues.cpp netcdf-cxx-4.2/netcdf.cpp + all: - ${CC} mpas_mesh_converter.cpp netcdf_utils.cpp ${CFLAGS} -o ${CONV_EXECUTABLE} -I. ${INCS} ${LIBS} - ${CC} mpas_cell_culler.cpp netcdf_utils.cpp ${CFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} - ${CC} mpas_mask_creator.cpp netcdf_utils.cpp jsoncpp.cpp ${CFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} + ${CXX} mpas_mesh_converter.cpp ${SRC} ${CFLAGS} -o ${CONV_EXECUTABLE} -I. ${INCS} ${LIBS} + ${CXX} mpas_cell_culler.cpp ${SRC} ${CFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} + ${CXX} mpas_mask_creator.cpp ${SRC} jsoncpp.cpp ${CFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} debug: - ${CC} mpas_mesh_converter.cpp netcdf_utils.cpp ${DFLAGS} -o ${CONV_EXECUTABLE} ${INCS} ${LIBS} - ${CC} mpas_cell_culler.cpp netcdf_utils.cpp ${DFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} - ${CC} mpas_mask_creator.cpp netcdf_utils.cpp jsoncpp.cpp ${DFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} + ${CXX} mpas_mesh_converter.cpp ${SRC} ${DFLAGS} -o ${CONV_EXECUTABLE} ${INCS} ${LIBS} + ${CXX} mpas_cell_culler.cpp ${SRC} ${DFLAGS} -o ${CULL_EXECUTABLE} ${INCS} ${LIBS} + ${CXX} mpas_mask_creator.cpp ${SRC} jsoncpp.cpp ${DFLAGS} -o ${MASK_EXECUTABLE} -I. ${INCS} ${LIBS} clean: rm -f grid.nc diff --git a/grid_gen/mesh_conversion_tools/README b/mesh_tools/mesh_conversion_tools/README similarity index 100% rename from grid_gen/mesh_conversion_tools/README rename to mesh_tools/mesh_conversion_tools/README diff --git a/grid_gen/mesh_conversion_tools/edge.h b/mesh_tools/mesh_conversion_tools/edge.h similarity index 100% rename from grid_gen/mesh_conversion_tools/edge.h rename to mesh_tools/mesh_conversion_tools/edge.h diff --git a/grid_gen/mesh_conversion_tools/json/json-forwards.h b/mesh_tools/mesh_conversion_tools/json/json-forwards.h similarity index 100% rename from grid_gen/mesh_conversion_tools/json/json-forwards.h rename to mesh_tools/mesh_conversion_tools/json/json-forwards.h diff --git a/grid_gen/mesh_conversion_tools/json/json.h b/mesh_tools/mesh_conversion_tools/json/json.h similarity index 100% rename from grid_gen/mesh_conversion_tools/json/json.h rename to mesh_tools/mesh_conversion_tools/json/json.h diff --git a/grid_gen/mesh_conversion_tools/jsoncpp.cpp b/mesh_tools/mesh_conversion_tools/jsoncpp.cpp similarity index 100% rename from grid_gen/mesh_conversion_tools/jsoncpp.cpp rename to mesh_tools/mesh_conversion_tools/jsoncpp.cpp diff --git a/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py b/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py new file mode 100755 index 000000000..27b9c8715 --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python +''' +This script identifies "horns" on a mesh (cells with two or fewer neighbors), +and marks them for culling. In some cores/configurations, these weakly +connected cells can be dynamically inactive, and, therefore, undesirable to +keep in a mesh. + +The method used will work on both planar and spherical meshes. +It adds the new masked cell to an existing 'cullCell' field if it exists, +otherwise it creates a new field. 
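
The horn test in the loop below is simply a count of valid entries in cellsOnCell, where 0 means "no neighbor" in MPAS connectivity, and any cell with two or fewer neighbors is marked in cullCell. The same rule in vectorized form, shown only as a sketch with made-up connectivity:

import numpy as np

cellsOnCell = np.array([[2, 3, 0, 0],    # made-up connectivity, 0 = no neighbor
                        [1, 3, 4, 0],
                        [1, 2, 4, 5],
                        [2, 3, 0, 0]])
nNeighbors = (cellsOnCell > 0).sum(axis=1)
cullCell = (nNeighbors <= 2).astype(int)
print(cullCell)   # -> [1 0 0 1]
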
+''' + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import sys +import numpy as np +import netCDF4 +from optparse import OptionParser +from datetime import datetime + + +print("== Gathering information. (Invoke with --help for more details. All " + "arguments are optional)\n") +parser = OptionParser() +parser.description = __doc__ +parser.add_option( + "-f", + "--file", + dest="inputFile", + help="Name of file to be processed.", + default="grid.nc", + metavar="FILENAME") +for option in parser.option_list: + if option.default != ("NO", "DEFAULT"): + option.help += (" " if option.help else "") + "[default: %default]" +options, args = parser.parse_args() + +print(" File to be modified: " + options.inputFile) + + +# Open file and get needed fields. +inputFile = netCDF4.Dataset(options.inputFile, 'r+') +nCells = len(inputFile.dimensions['nCells']) +cellsOnCell = inputFile.variables['cellsOnCell'][:] + +# Add the horn cells to existing mask if it exists +if 'cullCell' in inputFile.variables: + cullCell = inputFile.variables['cullCell'][:] +else: # otherwise make a new mask initialized empty + cullCell = np.zeros((nCells,)) # local variable + +nHorns = 0 +for i in range(nCells): + # NOTE: Can change this threshold, if needed for a particular use case. + if (cellsOnCell[i, :] > 0).sum() <= 2: + cullCell[i] = 1 + nHorns += 1 + +# Write out the new field +if 'cullCell' in inputFile.variables: + cullCellVar = inputFile.variables['cullCell'] +else: + cullCellVar = inputFile.createVariable('cullCell', 'i', ('nCells',)) +cullCellVar[:] = cullCell + + +# Update history attribute of netCDF file +thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + \ + ": " + " ".join(sys.argv[:]) +if hasattr(inputFile, 'history'): + newhist = '\n'.join([thiscommand, getattr(inputFile, 'history')]) +else: + newhist = thiscommand +setattr(inputFile, 'history', newhist) + +inputFile.close() + +print('\n{} "horn" locations have been marked in the field cullCell.'.format( + nHorns)) +print("Remember to use MpasCellCuller.x to actually remove them!") diff --git a/grid_gen/mesh_conversion_tools/mpas_cell_culler.cpp b/mesh_tools/mesh_conversion_tools/mpas_cell_culler.cpp similarity index 99% rename from grid_gen/mesh_conversion_tools/mpas_cell_culler.cpp rename to mesh_tools/mesh_conversion_tools/mpas_cell_culler.cpp index fc523cd55..87d62e9aa 100644 --- a/grid_gen/mesh_conversion_tools/mpas_cell_culler.cpp +++ b/mesh_tools/mesh_conversion_tools/mpas_cell_culler.cpp @@ -16,7 +16,7 @@ using namespace std; -enum { merge, invert, preserve }; +enum { mergeOp, invertOp, preserveOp }; int nCells, nVertices, nEdges, vertexDegree, maxEdges; bool spherical, periodic; @@ -155,13 +155,13 @@ int main ( int argc, char *argv[] ) { for ( int i = 3; i < argc; i+=2 ) { foundOperation = false; if (strcmp(argv[i], "-m") == 0 ) { - mask_ops.push_back(static_cast(merge)); + mask_ops.push_back(static_cast(mergeOp)); foundOperation = true; } else if ( strcmp(argv[i], "-i") == 0 ){ - mask_ops.push_back(static_cast(invert)); + mask_ops.push_back(static_cast(invertOp)); foundOperation = true; } else if ( strcmp(argv[i], "-p") == 0 ){ - mask_ops.push_back(static_cast(preserve)); + mask_ops.push_back(static_cast(preserveOp)); foundOperation = true; } else if ( strcmp(argv[i], "-c") == 0 ){ outputMap = true; @@ -471,8 +471,8 @@ int mergeCellMasks(const string masksFilename, const int maskOp){/*{{{*/ } } - if ( maskOp == invert || maskOp == merge ) { - if ( maskOp == invert ) { + if ( maskOp == invertOp 
|| maskOp == mergeOp ) { + if ( maskOp == invertOp ) { for (i = 0; i < nCells; i++){ flattenedMask[i] = (flattenedMask[i] + 1) % 2; } @@ -481,7 +481,7 @@ int mergeCellMasks(const string masksFilename, const int maskOp){/*{{{*/ for ( i = 0; i < nCells; i++ ){ cullCell[i] = max(cullCell[i], flattenedMask[i]); } - } else if ( maskOp == preserve ) { + } else if ( maskOp == preserveOp ) { for ( i = 0; i < nCells; i++ ) { if ( flattenedMask[i] && cullCell[i] ) { cullCell[i] = 0; @@ -614,21 +614,21 @@ int outputGridDimensions( const string outputFilename ){/*{{{*/ int nCellsNew, nEdgesNew, nVerticesNew; // Return this code to the OS in case of failure. static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Replace, NULL, 0, NcFile::Offset64Bits); /* for(vec_int_itr = edgesOnCell.begin(); vec_int_itr != edgesOnCell.end(); ++vec_int_itr){ - maxEdges = std::max(maxEdges, (int)(*vec_int_itr).size()); + maxEdges = std::max(maxEdges, (int)(*vec_int_itr).size()); }*/ - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // define dimensions NcDim *nCellsDim; NcDim *nEdgesDim; @@ -652,7 +652,7 @@ int outputGridDimensions( const string outputFilename ){/*{{{*/ for(int iEdge = 0; iEdge < nEdges; iEdge++){ nEdgesNew += (edgeMap.at(iEdge) != -1); } - + // write dimensions if (!(nCellsDim = grid.add_dim( "nCells", nCellsNew) )) return NC_ERR; if (!(nEdgesDim = grid.add_dim( "nEdges", nEdgesNew) )) return NC_ERR; @@ -662,7 +662,7 @@ int outputGridDimensions( const string outputFilename ){/*{{{*/ if (!(timeDim = grid.add_dim( "Time") )) return NC_ERR; grid.close(); - + // file closed when file obj goes out of scope return 0; }/*}}}*/ @@ -675,10 +675,10 @@ int outputGridAttributes( const string inputFilename, const string outputFilenam * **********************************************************************/ // Return this code to the OS in case of failure. static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); @@ -689,7 +689,7 @@ int outputGridAttributes( const string inputFilename, const string outputFilenam string history_str = ""; string id_str = ""; string parent_str = ""; - + // write attributes if(!spherical){ if (!(sphereAtt = grid.add_att( "on_a_sphere", "NO\0"))) return NC_ERR; @@ -734,7 +734,7 @@ int outputGridAttributes( const string inputFilename, const string outputFilenam if (!(id = grid.add_att( "file_id", id_str.c_str() ))) return NC_ERR; grid.close(); - + // file closed when file obj goes out of scope return 0; }/*}}}*/ @@ -749,16 +749,16 @@ int mapAndOutputGridCoordinates( const string inputFilename, const string output * **********************************************************************/ // Return this code to the OS in case of failure. 
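
For context on the rename above: MpasCellCuller.x folds extra mask files into cullCell with three operations, now named mergeOp, invertOp, and preserveOp. In NumPy terms the three behave roughly as sketched below; this is an illustration of the C++ loops in mergeCellMasks, not a drop-in replacement.

import numpy as np

def apply_mask_op_sketch(cullCell, mask, op):
    # merge:    also cull wherever the extra mask is set
    # invert:   invert the extra mask, then merge it
    # preserve: never cull a cell the extra mask wants to keep
    mask = np.asarray(mask, dtype=int)
    if op == 'invert':
        mask = (mask + 1) % 2
    if op in ('merge', 'invert'):
        return np.maximum(cullCell, mask)
    if op == 'preserve':
        out = np.array(cullCell, copy=True)
        out[(mask == 1) & (cullCell == 1)] = 0
        return out
    raise ValueError('unknown op: {}'.format(op))

cull = np.array([1, 1, 0, 0])
region = np.array([1, 0, 1, 0])
print(apply_mask_op_sketch(cull, region, 'preserve'))   # -> [0 1 0 0]
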
static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // fetch dimensions NcDim *nCellsDim = grid.get_dim( "nCells" ); NcDim *nEdgesDim = grid.get_dim( "nEdges" ); @@ -774,7 +774,7 @@ int mapAndOutputGridCoordinates( const string inputFilename, const string output NcVar *idx2cellVar, *idx2edgeVar, *idx2vertexVar; int i, idx_map; - + double *xOld, *yOld, *zOld, *latOld, *lonOld; double *xNew, *yNew, *zNew, *latNew, *lonNew; int *idxToNew; @@ -833,7 +833,7 @@ int mapAndOutputGridCoordinates( const string inputFilename, const string output delete[] latNew; delete[] lonNew; delete[] idxToNew; - + //Build and write edge coordinate arrays xOld = new double[nEdges]; yOld = new double[nEdges]; @@ -964,16 +964,16 @@ int mapAndOutputCellFields( const string inputFilename, const string outputFilen * ***************************************************************/ // Return this code to the OS in case of failure. static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // fetch dimensions NcDim *nCellsDim = grid.get_dim( "nCells" ); NcDim *nEdgesDim = grid.get_dim( "nEdges" ); @@ -993,7 +993,7 @@ int mapAndOutputCellFields( const string inputFilename, const string outputFilen double *areaCellNew; int *tmp_arr_old, *nEdgesOnCellOld, *nEdgesOnCellNew; int *tmp_arr_new; - + tmp_arr_old = new int[nCells*maxEdges]; nEdgesOnCellOld = new int[nCells]; nEdgesOnCellNew = new int[nCellsNew]; @@ -1106,7 +1106,7 @@ int mapAndOutputCellFields( const string inputFilename, const string outputFilen delete[] tmp_arr_new; // Map areaCell - areaCellNew = new double[nCellsNew]; + areaCellNew = new double[nCellsNew]; for(int iCell = 0; iCell < nCells; iCell++){ if(cellMap.at(iCell) != -1){ @@ -1154,16 +1154,16 @@ int mapAndOutputEdgeFields( const string inputFilename, const string outputFilen * ***************************************************************/ // Return this code to the OS in case of failure. static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // fetch dimensions NcDim *nEdgesDim = grid.get_dim( "nEdges" ); NcDim *maxEdges2Dim = grid.get_dim( "maxEdges2" ); @@ -1265,7 +1265,7 @@ int mapAndOutputEdgeFields( const string inputFilename, const string outputFilen #endif } } - + if (!(voeVar = grid.add_var("verticesOnEdge", ncInt, nEdgesDim, twoDim))) return NC_ERR; if (!voeVar->put(verticesOnEdgeNew,nEdgesNew,2)) return NC_ERR; if (!(coeVar = grid.add_var("cellsOnEdge", ncInt, nEdgesDim, twoDim))) return NC_ERR; @@ -1394,16 +1394,16 @@ int mapAndOutputVertexFields( const string inputFilename, const string outputFil * ***************************************************************/ // Return this code to the OS in case of failure. 
static const int NC_ERR = 2; - + // set error behaviour (matches fortran behaviour) NcError err(NcError::verbose_nonfatal); - + // open the scvtmesh file NcFile grid(outputFilename.c_str(), NcFile::Write); - + // check to see if the file was opened if(!grid.is_valid()) return NC_ERR; - + // fetch dimensions NcDim *nVerticesDim = grid.get_dim( "nVertices" ); NcDim *vertexDegreeDim = grid.get_dim( "vertexDegree" ); diff --git a/grid_gen/mesh_conversion_tools/mpas_mask_creator.cpp b/mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp similarity index 99% rename from grid_gen/mesh_conversion_tools/mpas_mask_creator.cpp rename to mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp index 2cb22cfab..aeb36f746 100644 --- a/grid_gen/mesh_conversion_tools/mpas_mask_creator.cpp +++ b/mesh_tools/mesh_conversion_tools/mpas_mask_creator.cpp @@ -147,8 +147,12 @@ void print_usage() {/*{{{*/ cout << "\t\t\tthat will be used as seed points in a flood fill algorithim. This is useful when trying to remove isolated cells from a mesh." << endl; cout << "\t\t-f file.geojson: This argument pair defines a set of geojson features (regions, transects, or points)" << endl; cout << "\t\t\tthat will be converted into masks / lists." << endl; - cout << "\t\t--positive_lon: This argument causes the logitude range to be 0-360 degrees with the prime meridian at 0 degrees." << endl; - cout << "\t\t\tIf this flag is not set, the logitude range is -180-180 with 0 degrees being the prime meridian." << endl; + cout << "\t\t--positive_lon: It is unlikely that you want this argument. In rare cases when using a non-standard geojson" << endl; + cout << "\t\t\tfile where the longitude ranges from 0 to 360 degrees (with the prime meridian at 0 degrees), use this flag." << endl; + cout << "\t\t\tIf this flag is not set, the longitude range is -180-180 with 0 degrees being the prime meridian, which is the" << endl; + cout << "\t\t\tcase for standard geojson files including all features from the geometric_features repo." << endl; + cout << "\t\t\tThe fact that longitudes in the input MPAS mesh range from 0 to 360 is not relevant to this flag," << endl; + cout << "\t\t\tas latitude and longitude are recomputed internally from Cartesian coordinates." << endl; cout << "\t\t\tWhether this flag is passed in or not, any longitudes written are in the 0-360 range." << endl; }/*}}}*/ diff --git a/grid_gen/mesh_conversion_tools/mpas_mesh_converter.cpp b/mesh_tools/mesh_conversion_tools/mpas_mesh_converter.cpp similarity index 100% rename from grid_gen/mesh_conversion_tools/mpas_mesh_converter.cpp rename to mesh_tools/mesh_conversion_tools/mpas_mesh_converter.cpp diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/COPYRIGHT b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/COPYRIGHT new file mode 100644 index 000000000..e21824767 --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/COPYRIGHT @@ -0,0 +1,42 @@ +/*! \file +The NetCDF Copyright. + +\page copyright Copyright + +Copyright 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, +2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 University +Corporation for Atmospheric Research/Unidata. + +Portions of this software were developed by the Unidata Program at the +University Corporation for Atmospheric Research. + +Access and use of this software shall impose the following obligations +and understandings on the user. 
The user is granted the right, without +any fee or cost, to use, copy, modify, alter, enhance and distribute +this software, and any derivative works thereof, and its supporting +documentation for any purpose whatsoever, provided that this entire +notice appears in all copies of the software, derivative works and +supporting documentation. Further, UCAR requests that the user credit +UCAR/Unidata in any publications that result from the use of this +software or in any product that includes this software, although this +is not an obligation. The names UCAR and/or Unidata, however, may not +be used in any advertising or publicity to endorse or promote any +products or commercial entity unless specific written permission is +obtained from UCAR/Unidata. The user also understands that +UCAR/Unidata is not obligated to provide the user with any support, +consulting, training or assistance of any kind with regard to the use, +operation and performance of this software nor to provide the user +with any updates, revisions, new versions or "bug fixes." + +THIS SOFTWARE IS PROVIDED BY UCAR/UNIDATA "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL UCAR/UNIDATA BE LIABLE FOR ANY SPECIAL, +INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING +FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +WITH THE ACCESS, USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + + + diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/README b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/README new file mode 100644 index 000000000..e816934e9 --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/README @@ -0,0 +1,28 @@ +netcdfcpp.h the C++ interface + +netcdf.cpp the implementation of the interface, on top of the current + C library interface + +nctst.cpp a test program for the interface that creates a netCDF file + and then dumps out its contents in ASCII form to stdout. + This example may also be helpful in understanding how the + interface is intended to be used. + +example.c example of C code needed to create a small netCDF file + +example.cpp analogous example of C++ code needed to do the same thing + +Makefile makefile for building nctst + +ncvalues.cpp interface for auxilliary classes of typed arrays needed by + netCDF interface; fairly simple + +ncvalues.cpp implementation of auxilliary classes of typed arrays needed by + netCDF interface + +README this file + + + + + diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/config.h b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/config.h new file mode 100644 index 000000000..b0f2b81ba --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/config.h @@ -0,0 +1,93 @@ +/* config.h. Generated from config.h.in by configure. */ +/* config.h.in. Generated from configure.ac by autoheader. */ + +/* if true, run extra tests which may not work yet */ +/* #undef EXTRA_TESTS */ + +/* Define to 1 if you have the header file. */ +#define HAVE_DLFCN_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_INTTYPES_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_MEMORY_H 1 + +/* Define to 1 if you have the `nccreate' function. */ +/* #undef HAVE_NCCREATE */ + +/* Define to 1 if you have the `nc_def_opaque' function. 
*/ +/* #undef HAVE_NC_DEF_OPAQUE */ + +/* Define to 1 if you have the `nc_set_log_level' function. */ +/* #undef HAVE_NC_SET_LOG_LEVEL */ + +/* Define to 1 if you have the `nc_use_parallel_enabled' function. */ +/* #undef HAVE_NC_USE_PARALLEL_ENABLED */ + +/* Define to 1 if you have the header file. */ +#define HAVE_NETCDF_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STDINT_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STDLIB_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STRINGS_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STRING_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_SYS_STAT_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_SYS_TYPES_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_UNISTD_H 1 + +/* do large file tests */ +/* #undef LARGE_FILE_TESTS */ + +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +#define LT_OBJDIR ".libs/" + +/* Name of package */ +#define PACKAGE "netcdf-cxx" + +/* Define to the address where bug reports for this package should be sent. */ +#define PACKAGE_BUGREPORT "support-netcdf@unidata.ucar.edu" + +/* Define to the full name of this package. */ +#define PACKAGE_NAME "netCDF-cxx" + +/* Define to the full name and version of this package. */ +#define PACKAGE_STRING "netCDF-cxx 4.2" + +/* Define to the one symbol short name of this package. */ +#define PACKAGE_TARNAME "netcdf-cxx" + +/* Define to the home page for this package. */ +#define PACKAGE_URL "" + +/* Define to the version of this package. */ +#define PACKAGE_VERSION "4.2" + +/* Define to 1 if you have the ANSI C header files. */ +#define STDC_HEADERS 1 + +/* Place to put very large netCDF test files. */ +#define TEMP_LARGE "." + +/* Version number of package */ +#define VERSION "4.2" + +/* Number of bits in a file offset, on hosts where this is settable. */ +/* #undef _FILE_OFFSET_BITS */ + +/* Define for large files, on AIX-style hosts. */ +/* #undef _LARGE_FILES */ diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.cpp b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.cpp new file mode 100644 index 000000000..30c642965 --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.cpp @@ -0,0 +1,331 @@ +/********************************************************************* + * Copyright 1992, University Corporation for Atmospheric Research + * See netcdf/README file for copying and redistribution conditions. 
+ * + * Purpose: implementation of classes of typed arrays for netCDF + * + * $Header: /upc/share/CVS/netcdf-3/cxx/ncvalues.cpp,v 1.12 2008/03/05 16:45:32 russ Exp $ + *********************************************************************/ + +#include "config.h" +#include +#include +#include + +#include "ncvalues.h" + +NcValues::NcValues( void ) : the_type(ncNoType), the_number(0) +{} + +NcValues::NcValues(NcType type, long num) + : the_type(type), the_number(num) +{} + +NcValues::~NcValues( void ) +{} + +long NcValues::num( void ) +{ + return the_number; +} + +std::ostream& operator<< (std::ostream& os, const NcValues& vals) +{ + return vals.print(os); +} + +implement(NcValues,ncbyte) +implement(NcValues,char) +implement(NcValues,short) +implement(NcValues,int) +implement(NcValues,nclong) +implement(NcValues,long) +implement(NcValues,float) +implement(NcValues,double) + +Ncbytes_for_one_implement(ncbyte) +Ncbytes_for_one_implement(char) +Ncbytes_for_one_implement(short) +Ncbytes_for_one_implement(int) +Ncbytes_for_one_implement(nclong) +Ncbytes_for_one_implement(long) +Ncbytes_for_one_implement(float) +Ncbytes_for_one_implement(double) + +as_ncbyte_implement(short) +as_ncbyte_implement(int) +as_ncbyte_implement(nclong) +as_ncbyte_implement(long) +as_ncbyte_implement(float) +as_ncbyte_implement(double) + +inline ncbyte NcValues_char::as_ncbyte( long n ) const +{ + return the_values[n]; +} + +inline ncbyte NcValues_ncbyte::as_ncbyte( long n ) const +{ + return the_values[n]; +} + +as_char_implement(short) +as_char_implement(int) +as_char_implement(nclong) +as_char_implement(long) +as_char_implement(float) +as_char_implement(double) + +inline char NcValues_ncbyte::as_char( long n ) const +{ + return the_values[n] > CHAR_MAX ? ncBad_char : (char) the_values[n]; +} + +inline char NcValues_char::as_char( long n ) const +{ + return the_values[n]; +} + +as_short_implement(int) +as_short_implement(nclong) +as_short_implement(long) +as_short_implement(float) +as_short_implement(double) + +inline short NcValues_ncbyte::as_short( long n ) const +{ + return the_values[n]; +} + +inline short NcValues_char::as_short( long n ) const +{ + return the_values[n]; +} + +inline short NcValues_short::as_short( long n ) const +{ + return the_values[n]; +} + + +as_int_implement(float) +as_int_implement(double) + +inline int NcValues_ncbyte::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_char::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_short::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_int::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_nclong::as_int( long n ) const +{ + return the_values[n]; +} + +inline int NcValues_long::as_int( long n ) const +{ + return the_values[n]; +} + +as_nclong_implement(float) +as_nclong_implement(double) + +inline nclong NcValues_ncbyte::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_char::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_short::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_int::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_nclong::as_nclong( long n ) const +{ + return the_values[n]; +} + +inline nclong NcValues_long::as_nclong( long n ) const +{ + return the_values[n]; +} + +as_long_implement(float) +as_long_implement(double) + +inline long NcValues_ncbyte::as_long( long n ) const +{ + return the_values[n]; +} 
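These typed value blocks are normally obtained from a variable or attribute rather than constructed by hand. A minimal, hypothetical sketch of that access pattern follows; the variable name "temperature" and the function name are placeholders, not part of the upstream sources:

    #include <iostream>
    #include "netcdfcpp.h"

    // Read a variable's full value block and convert element by element;
    // per the interface docs, conversions that fall outside the target
    // type's range come back as that type's fill value.
    void dump_variable(NcFile& nc)
    {
        NcVar* var = nc.get_var("temperature");   // placeholder variable name
        if (!var)
            return;
        NcValues* vals = var->values();           // heap-allocated block of all values
        if (!vals)
            return;
        for (long i = 0; i < vals->num(); i++)
            std::cout << vals->as_double(i) << "\n";
        delete vals;                              // caller owns the returned block
    }

An NcValues block can also be streamed directly with operator<<, which forwards to the print() methods implemented further down in this file.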
+ +inline long NcValues_char::as_long( long n ) const +{ + return the_values[n]; +} + +inline long NcValues_short::as_long( long n ) const +{ + return the_values[n]; +} + +inline long NcValues_int::as_long( long n ) const +{ + return the_values[n]; +} + +inline long NcValues_nclong::as_long( long n ) const +{ + return the_values[n]; +} + +inline long NcValues_long::as_long( long n ) const +{ + return the_values[n]; +} + +as_float_implement(ncbyte) +as_float_implement(char) +as_float_implement(short) +as_float_implement(int) +as_float_implement(nclong) +as_float_implement(long) +as_float_implement(float) +as_float_implement(double) + +as_double_implement(ncbyte) +as_double_implement(char) +as_double_implement(short) +as_double_implement(int) +as_double_implement(nclong) +as_double_implement(long) +as_double_implement(float) +as_double_implement(double) + +as_string_implement(short) +as_string_implement(int) +as_string_implement(nclong) +as_string_implement(long) +as_string_implement(float) +as_string_implement(double) + +inline char* NcValues_ncbyte::as_string( long n ) const +{ + char* s = new char[the_number + 1]; + s[the_number] = '\0'; + strncpy(s, (const char*)the_values + n, (int)the_number); + return s; +} + +inline char* NcValues_char::as_string( long n ) const +{ + char* s = new char[the_number + 1]; + s[the_number] = '\0'; + strncpy(s, (const char*)the_values + n, (int)the_number); + return s; +} + +std::ostream& NcValues_short::print(std::ostream& os) const +{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_int::print(std::ostream& os) const +{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_nclong::print(std::ostream& os) const +{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_long::print(std::ostream& os) const +{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_ncbyte::print(std::ostream& os) const +{ + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + return os; +} + +std::ostream& NcValues_char::print(std::ostream& os) const +{ + os << '"'; + long len = the_number; + while (the_values[--len] == '\0') // don't output trailing null bytes + ; + for(int i = 0; i <= len; i++) + os << the_values[i] ; + os << '"'; + + return os; +} + +std::ostream& NcValues_float::print(std::ostream& os) const +{ + std::streamsize save=os.precision(); + os.precision(7); + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1] ; + os.precision(save); + return os; +} + +std::ostream& NcValues_double::print(std::ostream& os) const +{ + std::streamsize save=os.precision(); + os.precision(15); + for(int i = 0; i < the_number - 1; i++) + os << the_values[i] << ", "; + if (the_number > 0) + os << the_values[the_number-1]; + os.precision(save); + return os; +} diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.h b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.h new file mode 100644 index 000000000..e7655e4bd --- /dev/null +++ 
b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/ncvalues.h @@ -0,0 +1,279 @@ +/********************************************************************* + * Copyright 1992, University Corporation for Atmospheric Research + * See netcdf/README file for copying and redistribution conditions. + * + * Purpose: interface for classes of typed arrays for netCDF + * + * $Header: /upc/share/CVS/netcdf-3/cxx/ncvalues.h,v 1.7 2006/07/26 21:12:06 russ Exp $ + *********************************************************************/ + +#ifndef Ncvalues_def +#define Ncvalues_def + +#include +#include +#include +#include "netcdf.h" + +// Documentation warned this might change and now it has, for +// consistency with C interface +typedef signed char ncbyte; + +#define NC_UNSPECIFIED ((nc_type)0) + +// C++ interface dates from before netcdf-3, still uses some netcdf-2 names +#ifdef NO_NETCDF_2 +#define NC_LONG NC_INT +#define FILL_LONG NC_FILL_INT +typedef int nclong; +#define NC_FATAL 1 +#define NC_VERBOSE 2 +#endif + +enum NcType +{ + ncNoType = NC_UNSPECIFIED, + ncByte = NC_BYTE, + ncChar = NC_CHAR, + ncShort = NC_SHORT, + ncInt = NC_INT, + ncLong = NC_LONG, // deprecated, someday want to use for 64-bit ints + ncFloat = NC_FLOAT, + ncDouble = NC_DOUBLE +}; + +#define ncBad_ncbyte ncBad_byte +static const ncbyte ncBad_byte = NC_FILL_BYTE; +static const char ncBad_char = NC_FILL_CHAR; +static const short ncBad_short = NC_FILL_SHORT; +static const nclong ncBad_nclong = FILL_LONG; // deprecated +static const int ncBad_int = NC_FILL_INT; +static const long ncBad_long = FILL_LONG; // deprecated +static const float ncBad_float = NC_FILL_FLOAT; +static const double ncBad_double = NC_FILL_DOUBLE; + +// macros to glue tokens together to form new names (used to be in generic.h) +#define name2(a,b) a ## b +#define declare(clas,t) name2(clas,declare)(t) +#define implement(clas,t) name2(clas,implement)(t) +// This is the same as the name2 macro, but we need to define our own +// version since rescanning something generated with the name2 macro +// won't necessarily cause name2 to be expanded again. 
+#define makename2(z, y) makename2_x(z, y) +#define makename2_x(z, y) z##y + +#define NcVal(TYPE) makename2(NcValues_,TYPE) + +#define NcValuesdeclare(TYPE) \ +class NcVal(TYPE) : public NcValues \ +{ \ + public: \ + NcVal(TYPE)( void ); \ + NcVal(TYPE)(long num); \ + NcVal(TYPE)(long num, const TYPE* vals); \ + NcVal(TYPE)(const NcVal(TYPE)&); \ + virtual NcVal(TYPE)& operator=(const NcVal(TYPE)&); \ + virtual ~NcVal(TYPE)( void ); \ + virtual void* base( void ) const; \ + virtual int bytes_for_one( void ) const; \ + virtual ncbyte as_ncbyte( long n ) const; \ + virtual char as_char( long n ) const; \ + virtual short as_short( long n ) const; \ + virtual int as_int( long n ) const; \ + virtual int as_nclong( long n ) const; \ + virtual long as_long( long n ) const; \ + virtual float as_float( long n ) const; \ + virtual double as_double( long n ) const; \ + virtual char* as_string( long n ) const; \ + virtual int invalid( void ) const; \ + private: \ + TYPE* the_values; \ + std::ostream& print(std::ostream&) const; \ +}; + +#define NcTypeEnum(TYPE) makename2(_nc__,TYPE) +#define _nc__ncbyte ncByte +#define _nc__char ncChar +#define _nc__short ncShort +#define _nc__int ncInt +#define _nc__nclong ncLong +#define _nc__long ncLong +#define _nc__float ncFloat +#define _nc__double ncDouble +#define NcValuesimplement(TYPE) \ +NcVal(TYPE)::NcVal(TYPE)( void ) \ + : NcValues(NcTypeEnum(TYPE), 0), the_values(0) \ +{} \ + \ +NcVal(TYPE)::NcVal(TYPE)(long num, const TYPE* vals) \ + : NcValues(NcTypeEnum(TYPE), num) \ +{ \ + the_values = new TYPE[num]; \ + for(int i = 0; i < num; i++) \ + the_values[i] = vals[i]; \ +} \ + \ +NcVal(TYPE)::NcVal(TYPE)(long num) \ + : NcValues(NcTypeEnum(TYPE), num), the_values(new TYPE[num]) \ +{} \ + \ +NcVal(TYPE)::NcVal(TYPE)(const NcVal(TYPE)& v) : \ + NcValues(v) \ +{ \ + delete[] the_values; \ + the_values = new TYPE[v.the_number]; \ + for(int i = 0; i < v.the_number; i++) \ + the_values[i] = v.the_values[i]; \ +} \ + \ +NcVal(TYPE)& NcVal(TYPE)::operator=(const NcVal(TYPE)& v) \ +{ \ + if ( &v != this) { \ + NcValues::operator=(v); \ + delete[] the_values; \ + the_values = new TYPE[v.the_number]; \ + for(int i = 0; i < v.the_number; i++) \ + the_values[i] = v.the_values[i]; \ + } \ + return *this; \ +} \ + \ +void* NcVal(TYPE)::base( void ) const \ +{ \ + return the_values; \ +} \ + \ +NcVal(TYPE)::~NcVal(TYPE)( void ) \ +{ \ + delete[] the_values; \ +} \ + \ +int NcVal(TYPE)::invalid( void ) const \ +{ \ + for(int i=0;i UCHAR_MAX) \ + return ncBad_byte; \ + return (ncbyte) the_values[n]; \ +} + +#define as_char_implement(TYPE) \ +char NcVal(TYPE)::as_char( long n ) const \ +{ \ + if (the_values[n] < CHAR_MIN || the_values[n] > CHAR_MAX) \ + return ncBad_char; \ + return (char) the_values[n]; \ +} + +#define as_short_implement(TYPE) \ +short NcVal(TYPE)::as_short( long n ) const \ +{ \ + if (the_values[n] < SHRT_MIN || the_values[n] > SHRT_MAX) \ + return ncBad_short; \ + return (short) the_values[n]; \ +} + +#define NCINT_MIN INT_MIN +#define NCINT_MAX INT_MAX +#define as_int_implement(TYPE) \ +int NcVal(TYPE)::as_int( long n ) const \ +{ \ + if (the_values[n] < NCINT_MIN || the_values[n] > NCINT_MAX) \ + return ncBad_int; \ + return (int) the_values[n]; \ +} + +#define NCLONG_MIN INT_MIN +#define NCLONG_MAX INT_MAX +#define as_nclong_implement(TYPE) \ +nclong NcVal(TYPE)::as_nclong( long n ) const \ +{ \ + if (the_values[n] < NCLONG_MIN || the_values[n] > NCLONG_MAX) \ + return ncBad_nclong; \ + return (nclong) the_values[n]; \ +} + +#define 
as_long_implement(TYPE) \ +long NcVal(TYPE)::as_long( long n ) const \ +{ \ + if (the_values[n] < LONG_MIN || the_values[n] > LONG_MAX) \ + return ncBad_long; \ + return (long) the_values[n]; \ +} + +#define as_float_implement(TYPE) \ +inline float NcVal(TYPE)::as_float( long n ) const \ +{ \ + return (float) the_values[n]; \ +} + +#define as_double_implement(TYPE) \ +inline double NcVal(TYPE)::as_double( long n ) const \ +{ \ + return (double) the_values[n]; \ +} + +#define as_string_implement(TYPE) \ +char* NcVal(TYPE)::as_string( long n ) const \ +{ \ + char* s = new char[32]; \ + std::ostringstream ostr; \ + ostr << the_values[n]; \ + ostr.str().copy(s, std::string::npos); \ + s[ostr.str().length()] = 0; \ + return s; \ +} + +class NcValues // ABC for value blocks +{ + public: + NcValues( void ); + NcValues(NcType, long); + virtual ~NcValues( void ); + virtual long num( void ); + virtual std::ostream& print(std::ostream&) const = 0; + virtual void* base( void ) const = 0; + virtual int bytes_for_one( void ) const = 0; + + // The following member functions provide conversions from the value + // type to a desired basic type. If the value is out of range, the + // default "fill-value" for the appropriate type is returned. + virtual ncbyte as_ncbyte( long n ) const = 0; // nth value as a byte + virtual char as_char( long n ) const = 0; // nth value as char + virtual short as_short( long n ) const = 0; // nth value as short + virtual int as_int( long n ) const = 0; // nth value as int + virtual int as_nclong( long n ) const = 0; // nth value as nclong + virtual long as_long( long n ) const = 0; // nth value as long + virtual float as_float( long n ) const = 0; // nth value as floating-point + virtual double as_double( long n ) const = 0; // nth value as double + virtual char* as_string( long n ) const = 0; // value as string + + protected: + NcType the_type; + long the_number; + friend std::ostream& operator<< (std::ostream&, const NcValues&); +}; + +declare(NcValues,ncbyte) +declare(NcValues,char) +declare(NcValues,short) +declare(NcValues,int) +declare(NcValues,nclong) +declare(NcValues,long) +declare(NcValues,float) +declare(NcValues,double) + +#endif diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.cpp b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.cpp new file mode 100644 index 000000000..6cb0e748b --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.cpp @@ -0,0 +1,1658 @@ +/********************************************************************* + * Copyright 1992, University Corporation for Atmospheric Research + * See netcdf/README file for copying and redistribution conditions. 
+ * + * Purpose: Implements class interface for netCDF over C interface + * + * $Header: /upc/share/CVS/netcdf-3/cxx/netcdf.cpp,v 1.18 2009/03/10 15:20:54 russ Exp $ + *********************************************************************/ + +#include +#include +#include +#include +#include "netcdfcpp.h" + +#ifndef TRUE +#define TRUE 1 +#define FALSE 0 +#endif + +static const int ncGlobal = NC_GLOBAL; // psuedo-variable for global attributes + +static const int ncBad = -1; // failure return for netCDF C interface + +NcFile::~NcFile( void ) +{ + (void) close(); +} + +NcBool NcFile::is_valid( void ) const +{ + return the_id != ncBad; +} + +int NcFile::num_dims( void ) const +{ + int num = 0; + if (is_valid()) + NcError::set_err( + nc_inq_ndims(the_id, &num) + ); + return num; +} + +int NcFile::num_vars( void ) const +{ + int num = 0; + if (is_valid()) + NcError::set_err( + nc_inq_nvars(the_id, &num) + ); + return num; +} + +int NcFile::num_atts( void ) const +{ + int num = 0; + if (is_valid()) + NcError::set_err( + nc_inq_natts(the_id, &num) + ); + return num; +} + +NcDim* NcFile::get_dim( NcToken name ) const +{ + int dimid; + if(NcError::set_err( + nc_inq_dimid(the_id, name, &dimid) + ) != NC_NOERR) + return 0; + return get_dim(dimid); +} + +NcVar* NcFile::get_var( NcToken name ) const +{ + int varid; + if(NcError::set_err( + nc_inq_varid(the_id, name, &varid) + ) != NC_NOERR) + return 0; + return get_var(varid); +} + +NcAtt* NcFile::get_att( NcToken aname ) const +{ + return is_valid() ? globalv->get_att(aname) : 0; +} + +NcDim* NcFile::get_dim( int i ) const +{ + if (! is_valid() || i < 0 || i >= num_dims()) + return 0; + return dimensions[i]; +} + +NcVar* NcFile::get_var( int i ) const +{ + if (! is_valid() || i < 0 || i >= num_vars()) + return 0; + return variables[i]; +} + +NcAtt* NcFile::get_att( int n ) const +{ + return is_valid() ? globalv->get_att(n) : 0; +} + +NcDim* NcFile::rec_dim( ) const +{ + if (! is_valid()) + return 0; + int recdim; + if(NcError::set_err( + nc_inq_unlimdim(the_id, &recdim) + ) != NC_NOERR) + return 0; + return get_dim(recdim); +} + +NcDim* NcFile::add_dim(NcToken name, long size) +{ + if (!is_valid() || !define_mode()) + return 0; + int n = num_dims(); + NcDim* dimp = new NcDim(this, name, size); + dimensions[n] = dimp; // for garbage collection on close() + return dimp; +} + +NcDim* NcFile::add_dim(NcToken name) +{ + return add_dim(name, NC_UNLIMITED); +} + +// To create scalar, 1-dimensional, ..., 5-dimensional variables, just supply +// as many dimension arguments as necessary + +NcVar* NcFile::add_var(NcToken name, NcType type, // scalar to 5D var + const NcDim* dim0, + const NcDim* dim1, + const NcDim* dim2, + const NcDim* dim3, + const NcDim* dim4) +{ + if (!is_valid() || !define_mode()) + return 0; + int dims[5]; + int ndims = 0; + if (dim0) { + ndims++; + dims[0] = dim0->id(); + if (dim1) { + ndims++; + dims[1] = dim1->id(); + if (dim2) { + ndims++; + dims[2] = dim2->id(); + if (dim3) { + ndims++; + dims[3] = dim3->id(); + if (dim4) { + ndims++; + dims[4] = dim4->id(); + } + } + } + } + } + int n = num_vars(); + int varid; + if(NcError::set_err( + nc_def_var(the_id, name, (nc_type) type, ndims, dims, &varid) + ) != NC_NOERR) + return 0; + NcVar* varp = + new NcVar(this, varid); + variables[n] = varp; + return varp; +} + +// For variables with more than 5 dimensions, use n-dimensional interface +// with vector of dimensions. 
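A minimal, hypothetical sketch of how the add_dim()/add_var() calls above and the n-dimensional overload that follows are typically combined when defining a file; the path, dimension sizes, and variable names are placeholders:

    #include "netcdfcpp.h"

    // Define-mode sketch: create dimensions, then variables on those dimensions.
    void define_example()
    {
        NcFile nc("example.nc", NcFile::Replace);   // placeholder path; creates the file
        if (!nc.is_valid())
            return;

        NcDim* time = nc.add_dim("time");           // unlimited (record) dimension
        NcDim* lat  = nc.add_dim("lat", 64);        // placeholder sizes
        NcDim* lon  = nc.add_dim("lon", 128);

        // Up to five dimensions can be passed directly ...
        NcVar* t = nc.add_var("temperature", ncFloat, time, lat, lon);
        if (t)
            t->add_att("units", "K");

        // ... while higher-rank variables use the n-dimensional overload below.
        const NcDim* dims[3] = {time, lat, lon};
        nc.add_var("pressure", ncFloat, 3, dims);
    }                                               // NcFile destructor closes the file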
+ +NcVar* NcFile::add_var(NcToken name, NcType type, int ndims, const NcDim** dims) +{ + if (!is_valid() || !define_mode()) + return 0; + int* dimids = new int[ndims]; + for (int i=0; i < ndims; i++) + dimids[i] = dims[i]->id(); + int n = num_vars(); + int varid; + if(NcError::set_err( + nc_def_var(the_id, name, (nc_type) type, ndims, dimids, &varid) + ) != NC_NOERR) + return 0; + NcVar* varp = + new NcVar(this, varid); + variables[n] = varp; + delete [] dimids; + return varp; +} + +#define NcFile_add_scalar_att(TYPE) \ +NcBool NcFile::add_att(NcToken aname, TYPE val) \ +{ \ + return globalv->add_att(aname, val); \ +} + +NcFile_add_scalar_att(char) +NcFile_add_scalar_att(ncbyte) +NcFile_add_scalar_att(short) +NcFile_add_scalar_att(int) +NcFile_add_scalar_att(long) +NcFile_add_scalar_att(float) +NcFile_add_scalar_att(double) +NcFile_add_scalar_att(const char*) + +#define NcFile_add_vector_att(TYPE) \ +NcBool NcFile::add_att(NcToken aname, int n, const TYPE* val) \ +{ \ + return globalv->add_att(aname, n, val); \ +} + +NcFile_add_vector_att(char) +NcFile_add_vector_att(ncbyte) +NcFile_add_vector_att(short) +NcFile_add_vector_att(int) +NcFile_add_vector_att(long) +NcFile_add_vector_att(float) +NcFile_add_vector_att(double) + +NcBool NcFile::set_fill( FillMode a_mode ) +{ + int prev_mode; + if (NcError::set_err( + nc_set_fill(the_id, a_mode, &prev_mode) + ) == NC_NOERR) { + the_fill_mode = a_mode; + return TRUE; + } + return FALSE; +} + +NcFile::FillMode NcFile::get_fill( void ) const +{ + return the_fill_mode; +} + +NcFile::FileFormat NcFile::get_format( void ) const +{ + int the_format; + NcError::set_err( + nc_inq_format(the_id, &the_format) + ); + switch (the_format) { + case NC_FORMAT_CLASSIC: + return Classic; + case NC_FORMAT_64BIT: + return Offset64Bits; + case NC_FORMAT_NETCDF4: + return Netcdf4; + case NC_FORMAT_NETCDF4_CLASSIC: + return Netcdf4Classic; + default: + return BadFormat; + } +} + +NcBool NcFile::sync( void ) +{ + if (!data_mode()) + return 0; + if (NcError::set_err( + nc_sync(the_id) + ) != NC_NOERR) + return 0; + int i; + for (i = 0; i < num_dims(); i++) { + if (dimensions[i]->is_valid()) { + dimensions[i]->sync(); + } else { // someone else added a new dimension + dimensions[i] = new NcDim(this,i); + } + } + for (i = 0; i < num_vars(); i++) { + if (variables[i]->is_valid()) { + variables[i]->sync(); + } else { // someone else added a new variable + variables[i] = new NcVar(this,i); + } + } + return 1; +} + +NcBool NcFile::close( void ) +{ + int i; + + if (the_id == ncBad) + return 0; + for (i = 0; i < num_dims(); i++) + delete dimensions[i]; + for (i = 0; i < num_vars(); i++) + delete variables[i]; + delete [] dimensions; + delete [] variables; + delete globalv; + int old_id = the_id; + the_id = ncBad; + return NcError::set_err( + nc_close(old_id) + ) == NC_NOERR; +} + +NcBool NcFile::abort( void ) +{ + return NcError::set_err( + nc_abort(the_id) + ) == NC_NOERR; +} + +NcBool NcFile::define_mode( void ) +{ + if (! is_valid()) + return FALSE; + if (in_define_mode) + return TRUE; + if (NcError::set_err( + nc_redef(the_id) + ) != NC_NOERR) + return FALSE; + in_define_mode = 1; + return TRUE; +} + +NcBool NcFile::data_mode( void ) +{ + if (! is_valid()) + return FALSE; + if (! 
in_define_mode) + return TRUE; + if (NcError::set_err( + nc_enddef(the_id) + ) != NC_NOERR) + return FALSE; + in_define_mode = 0; + return TRUE; +} + +int NcFile::id( void ) const +{ + return the_id; +} + +NcFile::NcFile( const char* path, FileMode fmode, + size_t* bufrsizeptr, size_t initialsize, FileFormat fformat ) +{ + NcError err(NcError::silent_nonfatal); // constructor must not fail + + int mode = NC_NOWRITE; + the_fill_mode = Fill; + int status; + + // If the user wants a 64-bit offset format, set that flag. + if (fformat == Offset64Bits) + mode |= NC_64BIT_OFFSET; +#ifdef USE_NETCDF4 + else if (fformat == Netcdf4) + mode |= NC_NETCDF4; + else if (fformat == Netcdf4Classic) + mode |= NC_NETCDF4|NC_CLASSIC_MODEL; +#endif + + switch (fmode) { + case Write: + mode |= NC_WRITE; + /*FALLTHRU*/ + case ReadOnly: + // use netcdf-3 interface to permit specifying tuning parameter + status = NcError::set_err( + nc__open(path, mode, bufrsizeptr, &the_id) + ); + if(status != NC_NOERR) + { + NcError::set_err(status); + the_id = -1; + } + in_define_mode = 0; + break; + case New: + mode |= NC_NOCLOBBER; + /*FALLTHRU*/ + case Replace: + // use netcdf-3 interface to permit specifying tuning parameters + status = NcError::set_err( + nc__create(path, mode, initialsize, + bufrsizeptr, &the_id) + ); + if(status != NC_NOERR) + { + NcError::set_err(status); + the_id = -1; + } + in_define_mode = 1; + break; + default: + the_id = ncBad; + in_define_mode = 0; + break; + } + if (is_valid()) { + dimensions = new NcDim*[NC_MAX_DIMS]; + variables = new NcVar*[NC_MAX_VARS]; + int i; + for (i = 0; i < num_dims(); i++) + dimensions[i] = new NcDim(this, i); + for (i = 0; i < num_vars(); i++) + variables[i] = new NcVar(this, i); + globalv = new NcVar(this, ncGlobal); + } else { + dimensions = 0; + variables = 0; + globalv = 0; + } +} + +NcToken NcDim::name( void ) const +{ + return the_name; +} + +long NcDim::size( void ) const +{ + size_t sz = 0; + if (the_file) + NcError::set_err( + nc_inq_dimlen(the_file->id(), the_id, &sz) + ); + return sz; +} + +NcBool NcDim::is_valid( void ) const +{ + return the_file->is_valid() && the_id != ncBad; +} + +NcBool NcDim::is_unlimited( void ) const +{ + if (!the_file) + return FALSE; + int recdim; + NcError::set_err( + nc_inq_unlimdim(the_file->id(), &recdim) + ); + return the_id == recdim; +} + +NcBool NcDim::rename(NcToken newname) +{ + if (strlen(newname) > strlen(the_name)) { + if (! 
the_file->define_mode()) + return FALSE; + } + NcBool ret = NcError::set_err( + nc_rename_dim(the_file->id(), the_id, newname) + ) == NC_NOERR; + if (ret) { + delete [] the_name; + the_name = new char[1 + strlen(newname)]; + strcpy(the_name, newname); + } + return ret; +} + +int NcDim::id( void ) const +{ + return the_id; +} + +NcBool NcDim::sync(void) +{ + char nam[NC_MAX_NAME]; + if (the_name) { + delete [] the_name; + } + if (the_file && NcError::set_err( + nc_inq_dimname(the_file->id(), the_id, nam) + ) == NC_NOERR) { + the_name = new char[strlen(nam) + 1]; + strcpy(the_name, nam); + return TRUE; + } + the_name = 0; + return FALSE; +} + +NcDim::NcDim(NcFile* nc, int id) + : the_file(nc), the_id(id) +{ + char nam[NC_MAX_NAME]; + if (the_file && NcError::set_err( + nc_inq_dimname(the_file->id(), the_id, nam) + ) == NC_NOERR) { + the_name = new char[strlen(nam) + 1]; + strcpy(the_name, nam); + } else { + the_name = 0; + } +} + +NcDim::NcDim(NcFile* nc, NcToken name, long sz) + : the_file(nc) +{ + size_t dimlen = sz; + if(NcError::set_err( + nc_def_dim(the_file->id(), name, dimlen, &the_id) + ) == NC_NOERR) { + the_name = new char[strlen(name) + 1]; + strcpy(the_name, name); + } else { + the_name = 0; + } +} + +NcDim::~NcDim( void ) +{ + delete [] the_name; +} + +#define Nc_as(TYPE) name2(as_,TYPE) +#define NcTypedComponent_as(TYPE) \ +TYPE NcTypedComponent::Nc_as(TYPE)( long n ) const \ +{ \ + NcValues* tmp = values(); \ + TYPE rval = tmp->Nc_as(TYPE)(n); \ + delete tmp; \ + return rval; \ +} +NcTypedComponent_as(ncbyte) +NcTypedComponent_as(char) +NcTypedComponent_as(short) +NcTypedComponent_as(int) +NcTypedComponent_as(nclong) +NcTypedComponent_as(long) +NcTypedComponent_as(float) +NcTypedComponent_as(double) + +char* NcTypedComponent::as_string( long n ) const +{ + NcValues* tmp = values(); + char* rval = tmp->as_string(n); + delete tmp; + return rval; +} + +NcTypedComponent::NcTypedComponent ( NcFile* nc ) + : the_file(nc) +{} + +NcValues* NcTypedComponent::get_space( long numVals ) const +{ + NcValues* valp; + if (numVals < 1) + numVals = num_vals(); + switch (type()) { + case ncFloat: + valp = new NcValues_float(numVals); + break; + case ncDouble: + valp = new NcValues_double(numVals); + break; + case ncInt: + valp = new NcValues_int(numVals); + break; + case ncShort: + valp = new NcValues_short(numVals); + break; + case ncByte: + case ncChar: + valp = new NcValues_char(numVals); + break; + case ncNoType: + default: + valp = 0; + } + return valp; +} + +NcVar::~NcVar( void ) +{ + delete[] the_cur; + delete[] cur_rec; + delete[] the_name; +} + +NcToken NcVar::name( void ) const +{ + return the_name; +} + +NcType NcVar::type( void ) const +{ + nc_type typ; + NcError::set_err( + nc_inq_vartype(the_file->id(), the_id, &typ) + ); + return (NcType) typ; +} + +NcBool NcVar::is_valid( void ) const +{ + return the_file->is_valid() && the_id != ncBad; +} + +int NcVar::num_dims( void ) const +{ + int ndim; + NcError::set_err( + nc_inq_varndims(the_file->id(), the_id, &ndim) + ); + return ndim; +} + +// The i-th dimension for this variable +NcDim* NcVar::get_dim( int i ) const +{ + int ndim; + int dims[NC_MAX_DIMS]; + if(NcError::set_err( + nc_inq_var(the_file->id(), the_id, 0, 0, &ndim, dims, 0) + ) != NC_NOERR || + i < 0 || i >= ndim) + return 0; + return the_file->get_dim(dims[i]); +} + +long* NcVar::edges( void ) const // edge lengths (dimension sizes) +{ + long* evec = new long[num_dims()]; + for(int i=0; i < num_dims(); i++) + evec[i] = get_dim(i)->size(); + return evec; +} + +int 
NcVar::num_atts( void ) const // handles variable and global atts +{ + int natt = 0; + if (the_file->is_valid()) + if (the_id == ncGlobal) + natt = the_file->num_atts(); + else + NcError::set_err( + nc_inq_varnatts(the_file->id(), the_id, &natt) + ); + return natt; +} + +NcAtt* NcVar::get_att( NcToken aname ) const +{ + NcAtt* att = new NcAtt(the_file, this, aname); + if (! att->is_valid()) { + delete att; + return 0; + } + return att; +} + +NcAtt* NcVar::get_att( int n ) const +{ + if (n < 0 || n >= num_atts()) + return 0; + NcToken aname = attname(n); + NcAtt* ap = get_att(aname); + delete [] (char*)aname; + return ap; +} + +long NcVar::num_vals( void ) const +{ + long prod = 1; + for (int d = 0; d < num_dims(); d++) + prod *= get_dim(d)->size(); + return prod; +} + +NcValues* NcVar::values( void ) const +{ + int ndims = num_dims(); + size_t crnr[NC_MAX_DIMS]; + size_t edgs[NC_MAX_DIMS]; + for (int i = 0; i < ndims; i++) { + crnr[i] = 0; + edgs[i] = get_dim(i)->size(); + } + NcValues* valp = get_space(); + int status; + switch (type()) { + case ncFloat: + status = NcError::set_err( + nc_get_vara_float(the_file->id(), the_id, crnr, edgs, + (float *)valp->base()) + ); + break; + case ncDouble: + status = NcError::set_err( + nc_get_vara_double(the_file->id(), the_id, crnr, edgs, + (double *)valp->base()) + ); + break; + case ncInt: + status = NcError::set_err( + nc_get_vara_int(the_file->id(), the_id, crnr, edgs, + (int *)valp->base()) + ); + break; + case ncShort: + status = NcError::set_err( + nc_get_vara_short(the_file->id(), the_id, crnr, edgs, + (short *)valp->base()) + ); + break; + case ncByte: + status = NcError::set_err( + nc_get_vara_schar(the_file->id(), the_id, crnr, edgs, + (signed char *)valp->base()) + ); + break; + case ncChar: + status = NcError::set_err( + nc_get_vara_text(the_file->id(), the_id, crnr, edgs, + (char *)valp->base()) + ); + break; + case ncNoType: + default: + return 0; + } + if (status != NC_NOERR) + return 0; + return valp; +} + +int NcVar::dim_to_index(NcDim *rdim) +{ + for (int i=0; i < num_dims() ; i++) { + if (strcmp(get_dim(i)->name(),rdim->name()) == 0) { + return i; + } + } + // we should fail and gripe about it here.... + return -1; +} + +void NcVar::set_rec(NcDim *rdim, long slice) +{ + int i = dim_to_index(rdim); + // we should fail and gripe about it here.... + if (slice >= get_dim(i)->size() && ! get_dim(i)->is_unlimited()) + return; + cur_rec[i] = slice; + return; +} + +void NcVar::set_rec(long rec) +{ + // Since we can't ask for the record dimension here + // just assume [0] is it..... + set_rec(get_dim(0),rec); + return; +} + +NcValues* NcVar::get_rec(void) +{ + return get_rec(get_dim(0), cur_rec[0]); +} + +NcValues* NcVar::get_rec(long rec) +{ + return get_rec(get_dim(0), rec); +} + +NcValues* NcVar::get_rec(NcDim* rdim, long slice) +{ + int idx = dim_to_index(rdim); + long size = num_dims(); + size_t* start = new size_t[size]; + long* startl = new long[size]; + for (int i=1; i < size ; i++) { + start[i] = 0; + startl[i] = 0; + } + start[idx] = slice; + startl[idx] = slice; + NcBool result = set_cur(startl); + if (! 
result ) { + delete [] start; + delete [] startl; + return 0; + } + + long* edgel = edges(); + size_t* edge = new size_t[size]; + for (int i=1; i < size ; i++) { + edge[i] = edgel[i]; + } + edge[idx] = 1; + edgel[idx] = 1; + NcValues* valp = get_space(rec_size(rdim)); + int status; + switch (type()) { + case ncFloat: + status = NcError::set_err( + nc_get_vara_float(the_file->id(), the_id, start, edge, + (float *)valp->base()) + ); + break; + case ncDouble: + status = NcError::set_err( + nc_get_vara_double(the_file->id(), the_id, start, edge, + (double *)valp->base()) + ); + break; + case ncInt: + status = NcError::set_err( + nc_get_vara_int(the_file->id(), the_id, start, edge, + (int *)valp->base()) + ); + break; + case ncShort: + status = NcError::set_err( + nc_get_vara_short(the_file->id(), the_id, start, edge, + (short *)valp->base()) + ); + break; + case ncByte: + status = NcError::set_err( + nc_get_vara_schar(the_file->id(), the_id, start, edge, + (signed char *)valp->base()) + ); + break; + case ncChar: + status = NcError::set_err( + nc_get_vara_text(the_file->id(), the_id, start, edge, + (char *)valp->base()) + ); + break; + case ncNoType: + default: + return 0; + } + delete [] start; + delete [] startl; + delete [] edge; + delete [] edgel; + if (status != NC_NOERR) { + delete valp; + return 0; + } + return valp; +} + + +#define NcVar_put_rec(TYPE) \ +NcBool NcVar::put_rec( const TYPE* vals) \ +{ \ + return put_rec(get_dim(0), vals, cur_rec[0]); \ +} \ + \ +NcBool NcVar::put_rec( NcDim *rdim, const TYPE* vals) \ +{ \ + int idx = dim_to_index(rdim); \ + return put_rec(rdim, vals, cur_rec[idx]); \ +} \ + \ +NcBool NcVar::put_rec( const TYPE* vals, \ + long rec) \ +{ \ + return put_rec(get_dim(0), vals, rec); \ +} \ + \ +NcBool NcVar::put_rec( NcDim* rdim, const TYPE* vals, \ + long slice) \ +{ \ + int idx = dim_to_index(rdim); \ + long size = num_dims(); \ + long* start = new long[size]; \ + for (int i=1; i < size ; i++) start[i] = 0; \ + start[idx] = slice; \ + NcBool result = set_cur(start); \ + delete [] start; \ + if (! result ) \ + return FALSE; \ + \ + long* edge = edges(); \ + edge[idx] = 1; \ + result = put(vals, edge); \ + delete [] edge; \ + return result; \ +} + +NcVar_put_rec(ncbyte) +NcVar_put_rec(char) +NcVar_put_rec(short) +NcVar_put_rec(int) +NcVar_put_rec(long) +NcVar_put_rec(float) +NcVar_put_rec(double) + +long NcVar::rec_size(void) { + return rec_size(get_dim(0)); +} + +long NcVar::rec_size(NcDim *rdim) { + int idx = dim_to_index(rdim); + long size = 1; + long* edge = edges(); + for( int i = 0 ; idata_mode()) \ + return -1; \ +int idx = dim_to_index(rdim); \ +long maxrec = get_dim(idx)->size(); \ +long maxvals = rec_size(rdim); \ +NcValues* val; \ +int validx; \ +for (long j=0; jas_ ## TYPE(validx)) break; \ + } \ + delete val; \ + if (validx == maxvals) return j; \ + } \ +return -1; \ +} + + +NcVar_get_index(ncbyte) +NcVar_get_index(char) +NcVar_get_index(short) +NcVar_get_index(nclong) +NcVar_get_index(long) +NcVar_get_index(float) +NcVar_get_index(double) + +// Macros below work for short, nclong, long, float, and double, but for ncbyte +// and char, we must use corresponding schar, uchar, or text C functions, so in +// these cases macros are expanded manually. +#define NcVar_put_array(TYPE) \ +NcBool NcVar::put( const TYPE* vals, \ + long edge0, \ + long edge1, \ + long edge2, \ + long edge3, \ + long edge4) \ +{ \ + /* no need to check type() vs. TYPE, invoked C function will do that */ \ + if (! 
the_file->data_mode()) \ + return FALSE; \ + size_t count[5]; \ + count[0] = edge0; \ + count[1] = edge1; \ + count[2] = edge2; \ + count[3] = edge3; \ + count[4] = edge4; \ + for (int i = 0; i < 5; i++) { \ + if (count[i]) { \ + if (num_dims() < i) \ + return FALSE; \ + } else \ + break; \ + } \ + size_t start[5]; \ + for (int j = 0; j < 5; j++) { \ + start[j] = the_cur[j]; \ + } \ + return NcError::set_err( \ + makename2(nc_put_vara_,TYPE) (the_file->id(), the_id, start, count, vals) \ + ) == NC_NOERR; \ +} + +NcBool NcVar::put( const ncbyte* vals, + long edge0, + long edge1, + long edge2, + long edge3, + long edge4) +{ + /* no need to check type() vs. TYPE, invoked C function will do that */ + if (! the_file->data_mode()) + return FALSE; + size_t count[5]; + count[0] = edge0; + count[1] = edge1; + count[2] = edge2; + count[3] = edge3; + count[4] = edge4; + for (int i = 0; i < 5; i++) { + if (count[i]) { + if (num_dims() < i) + return FALSE; + } else + break; + } + size_t start[5]; + for (int j = 0; j < 5; j++) { + start[j] = the_cur[j]; + } + return NcError::set_err( + nc_put_vara_schar (the_file->id(), the_id, start, count, vals) + ) == NC_NOERR; +} + +NcBool NcVar::put( const char* vals, + long edge0, + long edge1, + long edge2, + long edge3, + long edge4) +{ + /* no need to check type() vs. TYPE, invoked C function will do that */ + if (! the_file->data_mode()) + return FALSE; + size_t count[5]; + count[0] = edge0; + count[1] = edge1; + count[2] = edge2; + count[3] = edge3; + count[4] = edge4; + for (int i = 0; i < 5; i++) { + if (count[i]) { + if (num_dims() < i) + return FALSE; + } else + break; + } + size_t start[5]; + for (int j = 0; j < 5; j++) { + start[j] = the_cur[j]; + } + return NcError::set_err( + nc_put_vara_text (the_file->id(), the_id, start, count, vals) + ) == NC_NOERR; +} + +NcVar_put_array(short) +NcVar_put_array(int) +NcVar_put_array(long) +NcVar_put_array(float) +NcVar_put_array(double) + +#define NcVar_put_nd_array(TYPE) \ +NcBool NcVar::put( const TYPE* vals, const long* count ) \ +{ \ + /* no need to check type() vs. TYPE, invoked C function will do that */ \ + if (! the_file->data_mode()) \ + return FALSE; \ + size_t start[NC_MAX_DIMS]; \ + for (int i = 0; i < num_dims(); i++) \ + start[i] = the_cur[i]; \ + return NcError::set_err( \ + makename2(nc_put_vara_,TYPE) (the_file->id(), the_id, start, (const size_t *) count, vals) \ + ) == NC_NOERR; \ +} + +NcBool NcVar::put( const ncbyte* vals, const long* count ) +{ + /* no need to check type() vs. TYPE, invoked C function will do that */ + if (! the_file->data_mode()) + return FALSE; + size_t start[NC_MAX_DIMS]; + for (int i = 0; i < num_dims(); i++) + start[i] = the_cur[i]; + return NcError::set_err( + nc_put_vara_schar (the_file->id(), the_id, start, (const size_t *)count, vals) + ) == NC_NOERR; +} + +NcBool NcVar::put( const char* vals, const long* count ) +{ + /* no need to check type() vs. TYPE, invoked C function will do that */ + if (! the_file->data_mode()) + return FALSE; + size_t start[NC_MAX_DIMS]; + for (int i = 0; i < num_dims(); i++) + start[i] = the_cur[i]; + return NcError::set_err( + nc_put_vara_text (the_file->id(), the_id, start, (const size_t *)count, vals) + ) == NC_NOERR; +} + +NcVar_put_nd_array(short) +NcVar_put_nd_array(int) +NcVar_put_nd_array(long) +NcVar_put_nd_array(float) +NcVar_put_nd_array(double) + +#define NcVar_get_array(TYPE) \ +NcBool NcVar::get( TYPE* vals, \ + long edge0, \ + long edge1, \ + long edge2, \ + long edge3, \ + long edge4) const \ +{ \ + if (! 
the_file->data_mode()) \ + return FALSE; \ + size_t count[5]; \ + count[0] = edge0; \ + count[1] = edge1; \ + count[2] = edge2; \ + count[3] = edge3; \ + count[4] = edge4; \ + for (int i = 0; i < 5; i++) { \ + if (count[i]) { \ + if (num_dims() < i) \ + return FALSE; \ + } else \ + break; \ + } \ + size_t start[5]; \ + for (int j = 0; j < 5; j++) { \ + start[j] = the_cur[j]; \ + } \ + return NcError::set_err( \ + makename2(nc_get_vara_,TYPE) (the_file->id(), the_id, start, count, vals) \ + ) == NC_NOERR; \ +} + +NcBool NcVar::get( ncbyte* vals, + long edge0, + long edge1, + long edge2, + long edge3, + long edge4) const +{ + if (! the_file->data_mode()) + return FALSE; + size_t count[5]; + count[0] = edge0; + count[1] = edge1; + count[2] = edge2; + count[3] = edge3; + count[4] = edge4; + for (int i = 0; i < 5; i++) { + if (count[i]) { + if (num_dims() < i) + return FALSE; + } else + break; + } + size_t start[5]; + for (int j = 0; j < 5; j++) { + start[j] = the_cur[j]; + } + return NcError::set_err( + nc_get_vara_schar (the_file->id(), the_id, start, count, vals) + ) == NC_NOERR; +} + +NcBool NcVar::get( char* vals, + long edge0, + long edge1, + long edge2, + long edge3, + long edge4) const +{ + if (! the_file->data_mode()) + return FALSE; + size_t count[5]; + count[0] = edge0; + count[1] = edge1; + count[2] = edge2; + count[3] = edge3; + count[4] = edge4; + for (int i = 0; i < 5; i++) { + if (count[i]) { + if (num_dims() < i) + return FALSE; + } else + break; + } + size_t start[5]; + for (int j = 0; j < 5; j++) { + start[j] = the_cur[j]; + } + return NcError::set_err( + nc_get_vara_text (the_file->id(), the_id, start, count, vals) + ) == NC_NOERR; +} + +NcVar_get_array(short) +NcVar_get_array(int) +NcVar_get_array(long) +NcVar_get_array(float) +NcVar_get_array(double) + +#define NcVar_get_nd_array(TYPE) \ +NcBool NcVar::get( TYPE* vals, const long* count ) const \ +{ \ + if (! the_file->data_mode()) \ + return FALSE; \ + size_t start[NC_MAX_DIMS]; \ + for (int i = 0; i < num_dims(); i++) \ + start[i] = the_cur[i]; \ + return NcError::set_err( \ + makename2(nc_get_vara_,TYPE) (the_file->id(), the_id, start, (const size_t *) count, vals) \ + ) == NC_NOERR; \ +} + +NcBool NcVar::get( ncbyte* vals, const long* count ) const +{ + if (! the_file->data_mode()) + return FALSE; + size_t start[NC_MAX_DIMS]; + for (int i = 0; i < num_dims(); i++) + start[i] = the_cur[i]; + return nc_get_vara_schar (the_file->id(), the_id, start, (const size_t *) count, vals) == NC_NOERR; +} + +NcBool NcVar::get( char* vals, const long* count ) const +{ + if (! the_file->data_mode()) + return FALSE; + size_t start[NC_MAX_DIMS]; + for (int i = 0; i < num_dims(); i++) + start[i] = the_cur[i]; + return nc_get_vara_text (the_file->id(), the_id, start, (const size_t*) count, vals) == NC_NOERR; +} + +NcVar_get_nd_array(short) +NcVar_get_nd_array(int) +NcVar_get_nd_array(long) +NcVar_get_nd_array(float) +NcVar_get_nd_array(double) + +// If no args, set cursor to all zeros. Else set initial elements of cursor +// to args provided, rest to zeros. +NcBool NcVar::set_cur(long c0, long c1, long c2, long c3, long c4) +{ + long t[6]; + t[0] = c0; + t[1] = c1; + t[2] = c2; + t[3] = c3; + t[4] = c4; + t[5] = -1; + for(int j = 0; j < 6; j++) { // find how many parameters were used + int i; + if (t[j] == -1) { + if (num_dims() < j) + return FALSE; // too many for variable's dimensionality + for (i = 0; i < j; i++) { + if (t[i] >= get_dim(i)->size() && ! 
get_dim(i)->is_unlimited()) + return FALSE; // too big for dimension + the_cur[i] = t[i]; + } + for(i = j; i < num_dims(); i++) + the_cur[i] = 0; + return TRUE; + } + } + return TRUE; +} + +NcBool NcVar::set_cur(long* cur) +{ + for(int i = 0; i < num_dims(); i++) { + if (cur[i] >= get_dim(i)->size() && ! get_dim(i)->is_unlimited()) + return FALSE; + the_cur[i] = cur[i]; + } + return TRUE; +} + +#define NcVar_add_scalar_att(TYPE) \ +NcBool NcVar::add_att(NcToken aname, TYPE val) \ +{ \ + if (! the_file->define_mode()) \ + return FALSE; \ + if (NcError::set_err( \ + makename2(nc_put_att_,TYPE) (the_file->id(), the_id, aname, (nc_type) NcTypeEnum(TYPE), \ + 1, &val) \ + ) != NC_NOERR) \ + return FALSE; \ + return TRUE; \ +} + +NcBool NcVar::add_att(NcToken aname, ncbyte val) +{ + if (! the_file->define_mode()) + return FALSE; + if (nc_put_att_schar (the_file->id(), the_id, aname, (nc_type) NcTypeEnum(ncbyte), + 1, &val) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcBool NcVar::add_att(NcToken aname, char val) +{ + if (! the_file->define_mode()) + return FALSE; + if (nc_put_att_text (the_file->id(), the_id, aname, + 1, &val) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcVar_add_scalar_att(short) +NcVar_add_scalar_att(int) +NcVar_add_scalar_att(long) +NcVar_add_scalar_att(double) + +NcBool NcVar::add_att(NcToken aname, float val) +{ + if (! the_file->define_mode()) + return FALSE; + float fval = (float) val; // workaround for bug, val passed as double?? + if (nc_put_att_float(the_file->id(), the_id, aname, (nc_type) ncFloat, + 1, &fval) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcBool NcVar::add_att(NcToken aname, const char* val) +{ + if (! the_file->define_mode()) + return FALSE; + if (nc_put_att_text(the_file->id(), the_id, aname, + strlen(val), val) != NC_NOERR) + return FALSE; + return TRUE; +} + +#define NcVar_add_vector_att(TYPE) \ +NcBool NcVar::add_att(NcToken aname, int len, const TYPE* vals) \ +{ \ + if (! the_file->define_mode()) \ + return FALSE; \ + if (NcError::set_err( \ + makename2(nc_put_att_,TYPE) (the_file->id(), the_id, aname, (nc_type) NcTypeEnum(TYPE), \ + len, vals) \ + ) != NC_NOERR) \ + return FALSE; \ + return TRUE; \ +} + +NcBool NcVar::add_att(NcToken aname, int len, const ncbyte* vals) +{ + if (! the_file->define_mode()) + return FALSE; + if (NcError::set_err( + nc_put_att_schar (the_file->id(), the_id, aname, (nc_type) NcTypeEnum(ncbyte), + len, vals) + ) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcBool NcVar::add_att(NcToken aname, int len, const char* vals) +{ + if (! the_file->define_mode()) + return FALSE; + if (NcError::set_err( + nc_put_att_text (the_file->id(), the_id, aname, + len, vals) + ) != NC_NOERR) + return FALSE; + return TRUE; +} + +NcVar_add_vector_att(short) +NcVar_add_vector_att(int) +NcVar_add_vector_att(long) +NcVar_add_vector_att(float) +NcVar_add_vector_att(double) + +NcBool NcVar::rename(NcToken newname) +{ + if (strlen(newname) > strlen(the_name)) { + if (! 
the_file->define_mode()) + return FALSE; + } + NcBool ret = NcError::set_err( + nc_rename_var(the_file->id(), the_id, newname) + ) == NC_NOERR; + if (ret) { + delete [] the_name; + the_name = new char [1 + strlen(newname)]; + strcpy(the_name, newname); + } + return ret; +} + +int NcVar::id( void ) const +{ + return the_id; +} + +NcBool NcVar::sync(void) +{ + if (the_name) { + delete [] the_name; + } + if (the_cur) { + delete [] the_cur; + } + if (cur_rec) { + delete [] cur_rec; + } + char nam[NC_MAX_NAME]; + if (the_file + && NcError::set_err( + nc_inq_varname(the_file->id(), the_id, nam) + ) == NC_NOERR) { + the_name = new char[1 + strlen(nam)]; + strcpy(the_name, nam); + } else { + the_name = 0; + return FALSE; + } + init_cur(); + return TRUE; +} + + +NcVar::NcVar(NcFile* nc, int id) + : NcTypedComponent(nc), the_id(id) +{ + char nam[NC_MAX_NAME]; + if (the_file + && NcError::set_err( + nc_inq_varname(the_file->id(), the_id, nam) + ) == NC_NOERR) { + the_name = new char[1 + strlen(nam)]; + strcpy(the_name, nam); + } else { + the_name = 0; + } + init_cur(); +} + +int NcVar::attnum( NcToken attrname ) const +{ + int num; + for(num=0; num < num_atts(); num++) { + char aname[NC_MAX_NAME]; + NcError::set_err( + nc_inq_attname(the_file->id(), the_id, num, aname) + ); + if (strcmp(aname, attrname) == 0) + break; + } + return num; // num_atts() if no such attribute +} + +NcToken NcVar::attname( int attnum ) const // caller must delete[] +{ + if (attnum < 0 || attnum >= num_atts()) + return 0; + char aname[NC_MAX_NAME]; + if (NcError::set_err( + nc_inq_attname(the_file->id(), the_id, attnum, aname) + ) != NC_NOERR) + return 0; + char* rname = new char[1 + strlen(aname)]; + strcpy(rname, aname); + return rname; +} + +void NcVar::init_cur( void ) +{ + the_cur = new long[NC_MAX_DIMS]; // *** don't know num_dims() yet? + cur_rec = new long[NC_MAX_DIMS]; // *** don't know num_dims() yet? 
+ for(int i = 0; i < NC_MAX_DIMS; i++) { + the_cur[i] = 0; cur_rec[i] = 0; } +} + +NcAtt::NcAtt(NcFile* nc, const NcVar* var, NcToken name) + : NcTypedComponent(nc), the_variable(var) +{ + the_name = new char[1 + strlen(name)]; + strcpy(the_name, name); +} + +NcAtt::NcAtt(NcFile* nc, NcToken name) + : NcTypedComponent(nc), the_variable(NULL) +{ + the_name = new char[1 + strlen(name)]; + strcpy(the_name, name); +} + +NcAtt::~NcAtt( void ) +{ + delete [] the_name; +} + +NcToken NcAtt::name( void ) const +{ + return the_name; +} + +NcType NcAtt::type( void ) const +{ + nc_type typ; + NcError::set_err( + nc_inq_atttype(the_file->id(), the_variable->id(), the_name, &typ) + ); + return (NcType) typ; +} + +long NcAtt::num_vals( void ) const +{ + size_t len; + NcError::set_err( + nc_inq_attlen(the_file->id(), the_variable->id(), the_name, &len) + ); + return len; +} + +NcBool NcAtt::is_valid( void ) const +{ + int num; + return the_file->is_valid() && + (the_variable->id() == NC_GLOBAL || the_variable->is_valid()) && + NcError::set_err( + nc_inq_attid(the_file->id(), the_variable->id(), the_name, &num) + ) == NC_NOERR; +} + +NcValues* NcAtt::values( void ) const +{ + NcValues* valp = get_space(); + int status; + switch (type()) { + case ncFloat: + status = NcError::set_err( + nc_get_att_float(the_file->id(), the_variable->id(), the_name, + (float *)valp->base()) + ); + break; + case ncDouble: + status = NcError::set_err( + nc_get_att_double(the_file->id(), the_variable->id(), the_name, + (double *)valp->base()) + ); + break; + case ncInt: + status = NcError::set_err( + nc_get_att_int(the_file->id(), the_variable->id(), the_name, + (int *)valp->base()) + ); + break; + case ncShort: + status = NcError::set_err( + nc_get_att_short(the_file->id(), the_variable->id(), the_name, + (short *)valp->base()) + ); + break; + case ncByte: + status = NcError::set_err( + nc_get_att_schar(the_file->id(), the_variable->id(), the_name, + (signed char *)valp->base()) + ); + break; + case ncChar: + status = NcError::set_err( + nc_get_att_text(the_file->id(), the_variable->id(), the_name, + (char *)valp->base()) + ); + break; + case ncNoType: + default: + return 0; + } + if (status != NC_NOERR) { + delete valp; + return 0; + } + return valp; +} + +NcBool NcAtt::rename(NcToken newname) +{ + if (strlen(newname) > strlen(the_name)) { + if (! the_file->define_mode()) + return FALSE; + } + return NcError::set_err( + nc_rename_att(the_file->id(), the_variable->id(), + the_name, newname) + ) == NC_NOERR; +} + +NcBool NcAtt::remove( void ) +{ + if (! 
the_file->define_mode()) + return FALSE; + return NcError::set_err( + nc_del_att(the_file->id(), the_variable->id(), the_name) + ) == NC_NOERR; +} + +NcError::NcError( Behavior b ) +{ + the_old_state = ncopts; // global variable in version 2 C interface + the_old_err = ncerr; // global variable in version 2 C interface + ncopts = (int) b; +} + +NcError::~NcError( void ) +{ + ncopts = the_old_state; + ncerr = the_old_err; +} + +int NcError::get_err( void ) // returns most recent error +{ + return ncerr; +} + +int NcError::set_err (int err) +{ + ncerr = err; + // Check ncopts and handle appropriately + if(err != NC_NOERR) { + if(ncopts == verbose_nonfatal || ncopts == verbose_fatal) { + std::cout << nc_strerror(err) << std::endl; + } + if(ncopts == silent_fatal || ncopts == verbose_fatal) { + exit(ncopts); + } + } + return err; +} + +int NcError::ncerr = NC_NOERR; +int NcError::ncopts = NcError::verbose_fatal ; // for backward compatibility diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.hh b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.hh new file mode 100644 index 000000000..c93d8886e --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdf.hh @@ -0,0 +1 @@ +#include diff --git a/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdfcpp.h b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdfcpp.h new file mode 100644 index 000000000..2f828e0b8 --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/netcdf-cxx-4.2/netcdfcpp.h @@ -0,0 +1,469 @@ +/********************************************************************* + * Copyright 1992, University Corporation for Atmospheric Research + * See netcdf/README file for copying and redistribution conditions. + * + * Purpose: C++ class interface for netCDF + * + * $Header: /upc/share/CVS/netcdf-3/cxx/netcdfcpp.h,v 1.15 2009/03/10 15:20:54 russ Exp $ + *********************************************************************/ + +#ifndef NETCDF_HH +#define NETCDF_HH + +#include "ncvalues.h" // arrays that know their element type + +typedef const char* NcToken; // names for netCDF objects +typedef unsigned int NcBool; // many members return 0 on failure + +class NcDim; // dimensions +class NcVar; // variables +class NcAtt; // attributes + +/* + * *********************************************************************** + * A netCDF file. + * *********************************************************************** + */ +class NcFile +{ + public: + + virtual ~NcFile( void ); + + enum FileMode { + ReadOnly, // file exists, open read-only + Write, // file exists, open for writing + Replace, // create new file, even if already exists + New // create new file, fail if already exists + }; + + enum FileFormat { + Classic, // netCDF classic format (i.e. 
version 1 format) + Offset64Bits, // netCDF 64-bit offset format + Netcdf4, // netCDF-4 using HDF5 format + Netcdf4Classic, // netCDF-4 using HDF5 format using only netCDF-3 calls + BadFormat + }; + + NcFile( const char * path, FileMode = ReadOnly , + size_t *bufrsizeptr = NULL, // optional tuning parameters + size_t initialsize = 0, + FileFormat = Classic ); + + NcBool is_valid( void ) const; // opened OK in ctr, still valid + + int num_dims( void ) const; // number of dimensions + int num_vars( void ) const; // number of variables + int num_atts( void ) const; // number of (global) attributes + + NcDim* get_dim( NcToken ) const; // dimension by name + NcVar* get_var( NcToken ) const; // variable by name + NcAtt* get_att( NcToken ) const; // global attribute by name + + NcDim* get_dim( int ) const; // n-th dimension + NcVar* get_var( int ) const; // n-th variable + NcAtt* get_att( int ) const; // n-th global attribute + NcDim* rec_dim( void ) const; // unlimited dimension, if any + + // Add new dimensions, variables, global attributes. + // These put the file in "define" mode, so could be expensive. + virtual NcDim* add_dim( NcToken dimname, long dimsize ); + virtual NcDim* add_dim( NcToken dimname ); // unlimited + + virtual NcVar* add_var( NcToken varname, NcType type, // scalar + const NcDim* dim0=0, // 1-dim + const NcDim* dim1=0, // 2-dim + const NcDim* dim2=0, // 3-dim + const NcDim* dim3=0, // 4-dim + const NcDim* dim4=0 ); // 5-dim + virtual NcVar* add_var( NcToken varname, NcType type, // n-dim + int ndims, const NcDim** dims ); + + NcBool add_att( NcToken attname, char ); // scalar attributes + NcBool add_att( NcToken attname, ncbyte ); + NcBool add_att( NcToken attname, short ); + NcBool add_att( NcToken attname, long ); + NcBool add_att( NcToken attname, int ); + NcBool add_att( NcToken attname, float ); + NcBool add_att( NcToken attname, double ); + NcBool add_att( NcToken attname, const char*); // string attribute + NcBool add_att( NcToken attname, int, const char* ); // vector attributes + NcBool add_att( NcToken attname, int, const ncbyte* ); + NcBool add_att( NcToken attname, int, const short* ); + NcBool add_att( NcToken attname, int, const long* ); + NcBool add_att( NcToken attname, int, const int* ); + NcBool add_att( NcToken attname, int, const float* ); + NcBool add_att( NcToken attname, int, const double* ); + + enum FillMode { + Fill = NC_FILL, // prefill (default) + NoFill = NC_NOFILL, // don't prefill + Bad + }; + + NcBool set_fill( FillMode = Fill ); // set fill-mode + FillMode get_fill( void ) const; // get fill-mode + FileFormat get_format( void ) const; // get format version + + NcBool sync( void ); // synchronize to disk + NcBool close( void ); // to close earlier than dtr + NcBool abort( void ); // back out of bad defines + + // Needed by other Nc classes, but users will not need them + NcBool define_mode( void ); // leaves in define mode, if possible + NcBool data_mode( void ); // leaves in data mode, if possible + int id( void ) const; // id used by C interface + + protected: + int the_id; + int in_define_mode; + FillMode the_fill_mode; + NcDim** dimensions; + NcVar** variables; + NcVar* globalv; // "variable" for global attributes +}; + +/* + * For backward compatibility. We used to derive NcOldFile and NcNewFile + * from NcFile, but that was over-zealous inheritance. 
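As a usage note, the NcFile class declared above is typically driven in a read-only inquiry pattern such as the following hypothetical sketch; the path argument and the "title" attribute name are placeholders, not something the library defines:

    #include <iostream>
    #include "netcdfcpp.h"

    // Inquiry sketch: open an existing file read-only and report what it contains.
    void inspect(const char* path)                  // path supplied by the caller
    {
        NcFile nc(path, NcFile::ReadOnly);
        if (!nc.is_valid())
            return;                                 // open failed

        for (int i = 0; i < nc.num_dims(); i++)
            std::cout << nc.get_dim(i)->name() << " = "
                      << nc.get_dim(i)->size() << "\n";

        if (NcDim* rec = nc.rec_dim())              // unlimited dimension, if any
            std::cout << "record dimension: " << rec->name() << "\n";

        if (NcAtt* att = nc.get_att("title")) {     // placeholder attribute name
            char* s = att->as_string(0);            // string copy; caller owns both
            std::cout << "title: " << s << "\n";
            delete [] s;
            delete att;
        }
    }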
+ */ +#define NcOldFile NcFile +#define NcNewFile NcFile +#define Clobber Replace +#define NoClobber New + +/* + * ********************************************************************** + * A netCDF dimension, with a name and a size. These are only created + * by NcFile member functions, because they cannot exist independently + * of an open netCDF file. + * ********************************************************************** + */ +class NcDim +{ + public: + NcToken name( void ) const; + long size( void ) const; + NcBool is_valid( void ) const; + NcBool is_unlimited( void ) const; + NcBool rename( NcToken newname ); + int id( void ) const; + NcBool sync( void ); + + private: + NcFile *the_file; // not const because of rename + int the_id; + char *the_name; + + NcDim(NcFile*, int num); // existing dimension + NcDim(NcFile*, NcToken name, long sz); // defines a new dim + virtual ~NcDim( void ); + + // to construct dimensions, since constructor is private + friend class NcFile; +}; + + +/* + * ********************************************************************** + * Abstract base class for a netCDF variable or attribute, both of which + * have a name, a type, and associated values. These only exist as + * components of an open netCDF file. + * ********************************************************************** + */ +class NcTypedComponent +{ + public: + virtual ~NcTypedComponent( void ) {} + virtual NcToken name( void ) const = 0; + virtual NcType type( void ) const = 0; + virtual NcBool is_valid( void ) const = 0; + virtual long num_vals( void ) const = 0; + virtual NcBool rename( NcToken newname ) = 0; + virtual NcValues* values( void ) const = 0; // block of all values + + // The following member functions provide conversions from the value + // type to a desired basic type. If the value is out of range, + // the default "fill-value" for the appropriate type is returned. + + virtual ncbyte as_ncbyte( long n ) const; // nth value as an unsgnd char + virtual char as_char( long n ) const; // nth value as char + virtual short as_short( long n ) const; // nth value as short + virtual int as_int( long n ) const; // nth value as int + virtual int as_nclong( long n ) const; // nth value as nclong (deprecated) + virtual long as_long( long n ) const; // nth value as long + virtual float as_float( long n ) const; // nth value as floating-point + virtual double as_double( long n ) const; // nth value as double + virtual char* as_string( long n ) const; // nth value as string + + protected: + NcFile *the_file; + NcTypedComponent( NcFile* ); + virtual NcValues* get_space( long numVals = 0 ) const; // to hold values +}; + + +/* + * ********************************************************************** + * netCDF variables. 
In addition to a name and a type, these also have + * a shape, given by a list of dimensions + * ********************************************************************** + */ +class NcVar : public NcTypedComponent +{ + public: + virtual ~NcVar( void ); + NcToken name( void ) const; + NcType type( void ) const; + NcBool is_valid( void ) const; + int num_dims( void ) const; // dimensionality of variable + NcDim* get_dim( int ) const; // n-th dimension + long* edges( void ) const; // dimension sizes + int num_atts( void ) const; // number of attributes + NcAtt* get_att( NcToken ) const; // attribute by name + NcAtt* get_att( int ) const; // n-th attribute + long num_vals( void ) const; // product of dimension sizes + NcValues* values( void ) const; // all values + + // Put scalar or 1, ..., 5 dimensional arrays by providing enough + // arguments. Arguments are edge lengths, and their number must not + // exceed variable's dimensionality. Start corner is [0,0,..., 0] by + // default, but may be reset using the set_cur() member. FALSE is + // returned if type of values does not match type for variable. + NcBool put( const ncbyte* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const char* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const short* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const int* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const long* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const float* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + NcBool put( const double* vals, + long c0=0, long c1=0, long c2=0, long c3=0, long c4=0 ); + + // Put n-dimensional arrays, starting at [0, 0, ..., 0] by default, + // may be reset with set_cur(). + NcBool put( const ncbyte* vals, const long* counts ); + NcBool put( const char* vals, const long* counts ); + NcBool put( const short* vals, const long* counts ); + NcBool put( const int* vals, const long* counts ); + NcBool put( const long* vals, const long* counts ); + NcBool put( const float* vals, const long* counts ); + NcBool put( const double* vals, const long* counts ); + + // Get scalar or 1, ..., 5 dimensional arrays by providing enough + // arguments. Arguments are edge lengths, and their number must not + // exceed variable's dimensionality. Start corner is [0,0,..., 0] by + // default, but may be reset using the set_cur() member. + NcBool get( ncbyte* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( char* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( short* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( int* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( long* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( float* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + NcBool get( double* vals, long c0=0, long c1=0, + long c2=0, long c3=0, long c4=0 ) const; + + // Get n-dimensional arrays, starting at [0, 0, ..., 0] by default, + // may be reset with set_cur(). 
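 // (Illustrative read sketch, not part of the original header: for a
 // hypothetical 2-D double variable "temperature" with dimensions
 // [Time][nCells], one record could be read as
 //   NcVar* t = ncfile.get_var("temperature");
 //   long nCells = t->get_dim(1)->size();
 //   std::vector<double> buf(nCells);   // assumes <vector> is available
 //   t->set_cur(rec, 0);                // start corner [rec, 0]
 //   t->get(&buf[0], 1, nCells);        // edge lengths: 1 record, nCells values
 // where "temperature", ncfile, rec and buf are assumptions for the example.)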
+ NcBool get( ncbyte* vals, const long* counts ) const; + NcBool get( char* vals, const long* counts ) const; + NcBool get( short* vals, const long* counts ) const; + NcBool get( int* vals, const long* counts ) const; + NcBool get( long* vals, const long* counts ) const; + NcBool get( float* vals, const long* counts ) const; + NcBool get( double* vals, const long* counts ) const; + + NcBool set_cur(long c0=-1, long c1=-1, long c2=-1, + long c3=-1, long c4=-1); + NcBool set_cur(long* cur); + + // these put file in define mode, so could be expensive + NcBool add_att( NcToken, char ); // add scalar attributes + NcBool add_att( NcToken, ncbyte ); + NcBool add_att( NcToken, short ); + NcBool add_att( NcToken, int ); + NcBool add_att( NcToken, long ); + NcBool add_att( NcToken, float ); + NcBool add_att( NcToken, double ); + NcBool add_att( NcToken, const char* ); // string attribute + NcBool add_att( NcToken, int, const char* ); // vector attributes + NcBool add_att( NcToken, int, const ncbyte* ); + NcBool add_att( NcToken, int, const short* ); + NcBool add_att( NcToken, int, const int* ); + NcBool add_att( NcToken, int, const long* ); + NcBool add_att( NcToken, int, const float* ); + NcBool add_att( NcToken, int, const double* ); + + NcBool rename( NcToken newname ); + + long rec_size ( void ); // number of values per record + long rec_size ( NcDim* ); // number of values per dimension slice + + // Though following are intended for record variables, they also work + // for other variables, using first dimension as record dimension. + + // Get a record's worth of data + NcValues *get_rec(void); // get current record + NcValues *get_rec(long rec); // get specified record + NcValues *get_rec(NcDim* d); // get current dimension slice + NcValues *get_rec(NcDim* d, long slice); // get specified dimension slice + + // Put a record's worth of data in current record + NcBool put_rec( const ncbyte* vals ); + NcBool put_rec( const char* vals ); + NcBool put_rec( const short* vals ); + NcBool put_rec( const int* vals ); + NcBool put_rec( const long* vals ); + NcBool put_rec( const float* vals ); + NcBool put_rec( const double* vals ); + + // Put a dimension slice worth of data in current dimension slice + NcBool put_rec( NcDim* d, const ncbyte* vals ); + NcBool put_rec( NcDim* d, const char* vals ); + NcBool put_rec( NcDim* d, const short* vals ); + NcBool put_rec( NcDim* d, const int* vals ); + NcBool put_rec( NcDim* d, const long* vals ); + NcBool put_rec( NcDim* d, const float* vals ); + NcBool put_rec( NcDim* d, const double* vals ); + + // Put a record's worth of data in specified record + NcBool put_rec( const ncbyte* vals, long rec ); + NcBool put_rec( const char* vals, long rec ); + NcBool put_rec( const short* vals, long rec ); + NcBool put_rec( const int* vals, long rec ); + NcBool put_rec( const long* vals, long rec ); + NcBool put_rec( const float* vals, long rec ); + NcBool put_rec( const double* vals, long rec ); + + // Put a dimension slice worth of data in specified dimension slice + NcBool put_rec( NcDim* d, const ncbyte* vals, long slice ); + NcBool put_rec( NcDim* d, const char* vals, long slice ); + NcBool put_rec( NcDim* d, const short* vals, long slice ); + NcBool put_rec( NcDim* d, const int* vals, long slice ); + NcBool put_rec( NcDim* d, const long* vals, long slice ); + NcBool put_rec( NcDim* d, const float* vals, long slice ); + NcBool put_rec( NcDim* d, const double* vals, long slice ); + + // Get first record index corresponding to specified key value(s) + long get_index( 
const ncbyte* vals ); + long get_index( const char* vals ); + long get_index( const short* vals ); + long get_index( const int* vals ); + long get_index( const long* vals ); + long get_index( const float* vals ); + long get_index( const double* vals ); + + // Get first index of specified dimension corresponding to key values + long get_index( NcDim* d, const ncbyte* vals ); + long get_index( NcDim* d, const char* vals ); + long get_index( NcDim* d, const short* vals ); + long get_index( NcDim* d, const int* vals ); + long get_index( NcDim* d, const long* vals ); + long get_index( NcDim* d, const float* vals ); + long get_index( NcDim* d, const double* vals ); + + // Set current record + void set_rec ( long rec ); + // Set current dimension slice + void set_rec ( NcDim* d, long slice ); + + int id( void ) const; // rarely needed, C interface id + NcBool sync( void ); + + private: + int dim_to_index(NcDim* rdim); + int the_id; + long* the_cur; + char* the_name; + long* cur_rec; + + // private constructors because only an NcFile creates these + NcVar( void ); + NcVar(NcFile*, int); + + int attnum( NcToken attname ) const; + NcToken attname( int attnum ) const; + void init_cur( void ); + + // to make variables, since constructor is private + friend class NcFile; +}; + + +/* + * ********************************************************************** + * netCDF attributes. In addition to a name and a type, these are each + * associated with a specific variable, or are global to the file. + * ********************************************************************** + */ +class NcAtt : public NcTypedComponent +{ + public: + virtual ~NcAtt( void ); + NcToken name( void ) const; + NcType type( void ) const; + NcBool is_valid( void ) const; + long num_vals( void ) const; + NcValues* values( void ) const; + NcBool rename( NcToken newname ); + NcBool remove( void ); + + private: + const NcVar* the_variable; + char* the_name; + // protected constructors because only NcVars and NcFiles create + // attributes + NcAtt( NcFile*, const NcVar*, NcToken); + NcAtt( NcFile*, NcToken); // global attribute + + // To make attributes, since constructor is private + friend class NcFile; + friend NcAtt* NcVar::get_att( NcToken ) const; +}; + + +/* + * ********************************************************************** + * To control error handling. Declaring an NcError object temporarily + * changes the error-handling behavior until the object is destroyed, at + * which time the previous error-handling behavior is restored. 
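 *
 * A minimal usage sketch (illustrative, not part of the original header):
 * constructing a silent, non-fatal handler lets a failed open be detected
 * through is_valid() instead of terminating the program, e.g.
 *
 *   NcError guard(NcError::silent_nonfatal); // prior behavior restored when guard is destroyed
 *   NcFile nc("maybe_missing.nc", NcFile::ReadOnly); // hypothetical file name
 *   if (!nc.is_valid())
 *       report_and_skip(); // assumed error handler, not defined here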
+ * ********************************************************************** + */ +class NcError { + public: + enum Behavior { + silent_nonfatal = 0, + silent_fatal = 1, + verbose_nonfatal = 2, + verbose_fatal = 3 + }; + + // constructor saves previous error state, sets new state + NcError( Behavior b = verbose_fatal ); + + // destructor restores previous error state + virtual ~NcError( void ); + + int get_err( void ); // returns most recent error number + const char* get_errmsg( void ) {return nc_strerror(get_err());} + static int set_err( int err ); + + private: + int the_old_state; + int the_old_err; + static int ncopts; + static int ncerr; +}; + +#endif /* NETCDF_HH */ diff --git a/grid_gen/mesh_conversion_tools/netcdf_utils.cpp b/mesh_tools/mesh_conversion_tools/netcdf_utils.cpp similarity index 100% rename from grid_gen/mesh_conversion_tools/netcdf_utils.cpp rename to mesh_tools/mesh_conversion_tools/netcdf_utils.cpp diff --git a/grid_gen/mesh_conversion_tools/netcdf_utils.h b/mesh_tools/mesh_conversion_tools/netcdf_utils.h similarity index 100% rename from grid_gen/mesh_conversion_tools/netcdf_utils.h rename to mesh_tools/mesh_conversion_tools/netcdf_utils.h diff --git a/grid_gen/mesh_conversion_tools/pnt.h b/mesh_tools/mesh_conversion_tools/pnt.h similarity index 100% rename from grid_gen/mesh_conversion_tools/pnt.h rename to mesh_tools/mesh_conversion_tools/pnt.h diff --git a/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson b/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson new file mode 100644 index 000000000..d02d1cb6a --- /dev/null +++ b/mesh_tools/mesh_conversion_tools/test/Arctic_Ocean.geojson @@ -0,0 +1,7638 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": { + "name": "Arctic Ocean", + "tags": "Arctic_Ocean;Arctic_Basin", + "object": "region", + "component": "ocean", + "author": "http://www.marineregions.org/downloads.php#iho" + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -114.848345, + 77.854709 + ], + [ + -114.730291, + 77.818882 + ], + [ + -114.709164, + 77.813600 + ], + [ + -114.662509, + 77.803864 + ], + [ + -114.653505, + 77.801521 + ], + [ + -115.950298, + 77.452424 + ], + [ + -116.082091, + 77.488309 + ], + [ + -116.203336, + 77.519991 + ], + [ + -116.351100, + 77.539155 + ], + [ + -116.487782, + 77.550264 + ], + [ + -116.536118, + 77.544436 + ], + [ + -116.585827, + 77.540545 + ], + [ + -116.647509, + 77.537764 + ], + [ + -116.754182, + 77.534427 + ], + [ + -116.833073, + 77.533600 + ], + [ + -116.875000, + 77.534991 + ], + [ + -116.907527, + 77.527827 + ], + [ + -116.879173, + 77.517764 + ], + [ + -116.851100, + 77.516664 + ], + [ + -116.763336, + 77.514018 + ], + [ + -116.785282, + 77.499145 + ], + [ + -116.894445, + 77.473309 + ], + [ + -116.919727, + 77.470536 + ], + [ + -116.991673, + 77.466664 + ], + [ + -117.038336, + 77.471000 + ], + [ + -117.073545, + 77.475464 + ], + [ + -117.148755, + 77.455273 + ], + [ + -117.002918, + 77.396664 + ], + [ + -116.975827, + 77.393327 + ], + [ + -116.899173, + 77.399427 + ], + [ + -116.870827, + 77.400818 + ], + [ + -116.741382, + 77.395264 + ], + [ + -116.653618, + 77.385200 + ], + [ + -116.787355, + 77.318318 + ], + [ + -116.848345, + 77.315809 + ], + [ + -116.876391, + 77.318055 + ], + [ + -116.946664, + 77.329436 + ], + [ + -117.007500, + 77.343036 + ], + [ + -117.060818, + 77.353318 + ], + [ + -117.119455, + 77.359982 + ], + [ + -117.150836, + 77.360118 + ], + [ + -117.177564, + 77.343664 + ], + [ + -117.154164, + 77.332491 + ], + [ 
+ -117.117982, + 77.338391 + ], + [ + -117.060818, + 77.326664 + ], + [ + -117.020618, + 77.299491 + ], + [ + -117.276400, + 77.289155 + ], + [ + -117.450845, + 77.312191 + ], + [ + -117.611936, + 77.327773 + ], + [ + -117.731518, + 77.339991 + ], + [ + -117.753900, + 77.349155 + ], + [ + -117.774591, + 77.360400 + ], + [ + -117.858891, + 77.386509 + ], + [ + -117.911118, + 77.386936 + ], + [ + -118.097227, + 77.378591 + ], + [ + -118.130336, + 77.366091 + ], + [ + -118.165836, + 77.355255 + ], + [ + -118.197218, + 77.354982 + ], + [ + -118.224718, + 77.356091 + ], + [ + -118.449718, + 77.358873 + ], + [ + -118.651109, + 77.360536 + ], + [ + -118.732500, + 77.355545 + ], + [ + -118.757236, + 77.352482 + ], + [ + -118.870000, + 77.333882 + ], + [ + -118.893891, + 77.327482 + ], + [ + -118.916945, + 77.322491 + ], + [ + -118.941382, + 77.319718 + ], + [ + -119.001109, + 77.321109 + ], + [ + -119.086673, + 77.326664 + ], + [ + -119.114436, + 77.327482 + ], + [ + -119.153345, + 77.325818 + ], + [ + -119.201109, + 77.313036 + ], + [ + -119.222227, + 77.306364 + ], + [ + -119.260700, + 77.292209 + ], + [ + -119.295836, + 77.276655 + ], + [ + -119.315827, + 77.258045 + ], + [ + -119.339864, + 77.233873 + ], + [ + -119.355755, + 77.209291 + ], + [ + -119.389182, + 77.184418 + ], + [ + -119.410827, + 77.178591 + ], + [ + -119.433318, + 77.173600 + ], + [ + -119.600564, + 77.145827 + ], + [ + -119.776400, + 77.106091 + ], + [ + -119.818891, + 77.093873 + ], + [ + -119.835418, + 77.068664 + ], + [ + -119.920273, + 77.023609 + ], + [ + -119.955555, + 77.011245 + ], + [ + -119.976936, + 77.013318 + ], + [ + -119.997218, + 77.016391 + ], + [ + -120.030827, + 77.014300 + ], + [ + -120.069164, + 77.008045 + ], + [ + -120.091382, + 77.003055 + ], + [ + -120.365282, + 76.836109 + ], + [ + -120.374436, + 76.807964 + ], + [ + -120.401673, + 76.797209 + ], + [ + -120.581127, + 76.749418 + ], + [ + -120.604173, + 76.746373 + ], + [ + -120.633327, + 76.747482 + ], + [ + -120.669155, + 76.751100 + ], + [ + -120.766109, + 76.743591 + ], + [ + -120.812209, + 76.737200 + ], + [ + -120.840136, + 76.728036 + ], + [ + -120.861936, + 76.711927 + ], + [ + -120.892227, + 76.696091 + ], + [ + -120.923318, + 76.689973 + ], + [ + -121.056664, + 76.671373 + ], + [ + -121.079455, + 76.668318 + ], + [ + -121.109845, + 76.670818 + ], + [ + -121.212509, + 76.649718 + ], + [ + -121.311527, + 76.591791 + ], + [ + -121.306955, + 76.578327 + ], + [ + -121.312209, + 76.572491 + ], + [ + -121.421945, + 76.493591 + ], + [ + -121.523755, + 76.440664 + ], + [ + -121.549991, + 76.434709 + ], + [ + -121.738055, + 76.421100 + ], + [ + -121.781955, + 76.420255 + ], + [ + -121.826682, + 76.422764 + ], + [ + -122.014718, + 76.432482 + ], + [ + -122.309436, + 76.408873 + ], + [ + -122.398900, + 76.396945 + ], + [ + -122.578609, + 76.353591 + ], + [ + -122.608327, + 76.345273 + ], + [ + -122.629436, + 76.333182 + ], + [ + -122.641400, + 76.293873 + ], + [ + -122.633900, + 76.267627 + ], + [ + -122.721118, + 76.231373 + ], + [ + -122.848618, + 76.208882 + ], + [ + -122.979173, + 76.125809 + ], + [ + -123.037782, + 76.084718 + ], + [ + -124.054720, + 75.960278 + ], + [ + -125.071657, + 75.834747 + ], + [ + -126.088595, + 75.708118 + ], + [ + -127.105533, + 75.580380 + ], + [ + -128.122471, + 75.451524 + ], + [ + -129.139408, + 75.321543 + ], + [ + -130.156346, + 75.190425 + ], + [ + -131.173284, + 75.058163 + ], + [ + -132.190221, + 74.924747 + ], + [ + -133.207159, + 74.790168 + ], + [ + -134.224097, + 74.654415 + ], + [ + -135.241035, + 74.517481 
+ ], + [ + -136.257972, + 74.379354 + ], + [ + -137.274910, + 74.240027 + ], + [ + -138.291848, + 74.099488 + ], + [ + -139.308786, + 73.957729 + ], + [ + -140.325723, + 73.814739 + ], + [ + -141.342661, + 73.670509 + ], + [ + -142.359599, + 73.525029 + ], + [ + -143.376537, + 73.378290 + ], + [ + -144.393474, + 73.230280 + ], + [ + -145.410412, + 73.080991 + ], + [ + -146.427350, + 72.930412 + ], + [ + -147.444288, + 72.778533 + ], + [ + -148.461225, + 72.625344 + ], + [ + -149.478163, + 72.470835 + ], + [ + -150.495101, + 72.314995 + ], + [ + -151.512039, + 72.157815 + ], + [ + -152.528976, + 71.999283 + ], + [ + -153.545914, + 71.839390 + ], + [ + -154.562852, + 71.678125 + ], + [ + -155.579790, + 71.515477 + ], + [ + -156.596727, + 71.351436 + ], + [ + -157.617060, + 71.362620 + ], + [ + -158.513610, + 71.372447 + ], + [ + -158.637393, + 71.373804 + ], + [ + -159.657726, + 71.384988 + ], + [ + -160.678060, + 71.396172 + ], + [ + -161.698393, + 71.407356 + ], + [ + -162.718726, + 71.418540 + ], + [ + -163.739059, + 71.429724 + ], + [ + -164.759392, + 71.440907 + ], + [ + -165.779725, + 71.452091 + ], + [ + -166.800058, + 71.463275 + ], + [ + -167.820391, + 71.474459 + ], + [ + -168.840724, + 71.485643 + ], + [ + -169.861057, + 71.496827 + ], + [ + -170.881390, + 71.508011 + ], + [ + -171.901723, + 71.519195 + ], + [ + -172.922056, + 71.530379 + ], + [ + -173.942389, + 71.541562 + ], + [ + -174.962722, + 71.552746 + ], + [ + -175.983055, + 71.563930 + ], + [ + -177.003388, + 71.575114 + ], + [ + -178.023721, + 71.586298 + ], + [ + -179.044055, + 71.597482 + ], + [ + -179.195555, + 71.581382 + ], + [ + -179.222500, + 71.564709 + ], + [ + -179.305300, + 71.551364 + ], + [ + -179.502255, + 71.566373 + ], + [ + -179.628600, + 71.577191 + ], + [ + -179.900773, + 71.548791 + ], + [ + -179.927245, + 71.535536 + ], + [ + -180.000000, + 71.535845 + ], + [ + -180.000000, + 90.000000 + ], + [ + 180.000000, + 90.000000 + ], + [ + 180.000000, + 71.535855 + ], + [ + 179.981627, + 71.536100 + ], + [ + 178.970764, + 71.821787 + ], + [ + 177.959900, + 72.103201 + ], + [ + 176.949036, + 72.380399 + ], + [ + 175.938173, + 72.653438 + ], + [ + 174.927309, + 72.922374 + ], + [ + 173.916445, + 73.187262 + ], + [ + 172.905582, + 73.448159 + ], + [ + 171.894718, + 73.705119 + ], + [ + 170.883855, + 73.958196 + ], + [ + 169.872991, + 74.207444 + ], + [ + 168.862127, + 74.452917 + ], + [ + 167.851264, + 74.694667 + ], + [ + 166.840400, + 74.932746 + ], + [ + 165.829536, + 75.167206 + ], + [ + 164.818673, + 75.398099 + ], + [ + 163.807809, + 75.625474 + ], + [ + 162.796945, + 75.849382 + ], + [ + 161.786082, + 76.069873 + ], + [ + 160.775218, + 76.286994 + ], + [ + 159.764355, + 76.500796 + ], + [ + 158.753491, + 76.711325 + ], + [ + 157.742627, + 76.918629 + ], + [ + 156.731764, + 77.122755 + ], + [ + 156.722745, + 77.132200 + ], + [ + 156.700809, + 77.136936 + ], + [ + 156.677464, + 77.140545 + ], + [ + 156.653045, + 77.143055 + ], + [ + 156.603027, + 77.146945 + ], + [ + 156.555545, + 77.148036 + ], + [ + 156.496855, + 77.147064 + ], + [ + 156.450255, + 77.140273 + ], + [ + 156.436355, + 77.132064 + ], + [ + 155.417109, + 77.080709 + ], + [ + 154.397864, + 77.029154 + ], + [ + 153.378618, + 76.977396 + ], + [ + 152.359373, + 76.925435 + ], + [ + 151.340127, + 76.873270 + ], + [ + 150.320882, + 76.820901 + ], + [ + 149.301636, + 76.768327 + ], + [ + 149.113864, + 76.755827 + ], + [ + 148.982455, + 76.752209 + ], + [ + 148.746609, + 76.745818 + ], + [ + 147.645006, + 76.688409 + ], + [ + 146.543403, + 76.630756 + 
], + [ + 145.441800, + 76.572857 + ], + [ + 144.340197, + 76.514713 + ], + [ + 143.238594, + 76.456321 + ], + [ + 142.136991, + 76.397681 + ], + [ + 141.035388, + 76.338793 + ], + [ + 139.933785, + 76.279654 + ], + [ + 138.832182, + 76.220264 + ], + [ + 137.831149, + 76.365653 + ], + [ + 136.830116, + 76.509537 + ], + [ + 135.829083, + 76.651931 + ], + [ + 134.828050, + 76.792849 + ], + [ + 133.827017, + 76.932305 + ], + [ + 132.825984, + 77.070314 + ], + [ + 131.824951, + 77.206890 + ], + [ + 130.823918, + 77.342047 + ], + [ + 129.822885, + 77.475799 + ], + [ + 128.821852, + 77.608160 + ], + [ + 127.820819, + 77.739145 + ], + [ + 126.819786, + 77.868765 + ], + [ + 125.818753, + 77.997036 + ], + [ + 124.817720, + 78.123970 + ], + [ + 123.816687, + 78.249580 + ], + [ + 122.815654, + 78.373881 + ], + [ + 121.814621, + 78.496885 + ], + [ + 120.813588, + 78.618604 + ], + [ + 119.812555, + 78.739053 + ], + [ + 118.811522, + 78.858243 + ], + [ + 117.810489, + 78.976187 + ], + [ + 116.809456, + 79.092898 + ], + [ + 115.808423, + 79.208388 + ], + [ + 114.807390, + 79.322670 + ], + [ + 113.806357, + 79.435755 + ], + [ + 112.805324, + 79.547656 + ], + [ + 111.804291, + 79.658385 + ], + [ + 110.803258, + 79.767954 + ], + [ + 109.802225, + 79.876373 + ], + [ + 108.801192, + 79.983656 + ], + [ + 107.800159, + 80.089814 + ], + [ + 106.799126, + 80.194857 + ], + [ + 105.798093, + 80.298798 + ], + [ + 104.797060, + 80.401647 + ], + [ + 103.796027, + 80.503416 + ], + [ + 102.794995, + 80.604116 + ], + [ + 101.793962, + 80.703758 + ], + [ + 100.792929, + 80.802352 + ], + [ + 99.791896, + 80.899909 + ], + [ + 98.790863, + 80.996440 + ], + [ + 97.789830, + 81.091956 + ], + [ + 96.788797, + 81.186466 + ], + [ + 95.787764, + 81.279982 + ], + [ + 95.732755, + 81.287200 + ], + [ + 95.699418, + 81.290264 + ], + [ + 95.653045, + 81.290545 + ], + [ + 95.527482, + 81.289427 + ], + [ + 94.515779, + 81.284637 + ], + [ + 93.504076, + 81.279843 + ], + [ + 92.492374, + 81.275047 + ], + [ + 91.621942, + 81.270919 + ], + [ + 91.480671, + 81.270249 + ], + [ + 90.468968, + 81.265448 + ], + [ + 89.457266, + 81.260644 + ], + [ + 88.445563, + 81.255838 + ], + [ + 87.433860, + 81.251029 + ], + [ + 86.422158, + 81.246217 + ], + [ + 85.410455, + 81.241403 + ], + [ + 84.398752, + 81.236586 + ], + [ + 83.387049, + 81.231766 + ], + [ + 82.375347, + 81.226944 + ], + [ + 81.363644, + 81.222119 + ], + [ + 80.351941, + 81.217292 + ], + [ + 79.340239, + 81.212462 + ], + [ + 78.328536, + 81.207629 + ], + [ + 77.316833, + 81.202794 + ], + [ + 76.305131, + 81.197956 + ], + [ + 75.293428, + 81.193115 + ], + [ + 74.281725, + 81.188272 + ], + [ + 73.270023, + 81.183426 + ], + [ + 72.258320, + 81.178578 + ], + [ + 71.246617, + 81.173727 + ], + [ + 70.234914, + 81.168873 + ], + [ + 69.223212, + 81.164016 + ], + [ + 68.211509, + 81.159157 + ], + [ + 67.199806, + 81.154295 + ], + [ + 66.188104, + 81.149431 + ], + [ + 65.176401, + 81.144564 + ], + [ + 63.791664, + 81.664155 + ], + [ + 63.782491, + 81.669436 + ], + [ + 63.749436, + 81.679427 + ], + [ + 63.701664, + 81.688873 + ], + [ + 63.639436, + 81.697755 + ], + [ + 63.600827, + 81.701936 + ], + [ + 63.463055, + 81.713882 + ], + [ + 63.336382, + 81.719145 + ], + [ + 63.296109, + 81.719982 + ], + [ + 63.217209, + 81.720264 + ], + [ + 63.105827, + 81.717209 + ], + [ + 62.965545, + 81.708600 + ], + [ + 62.764718, + 81.703045 + ], + [ + 62.793327, + 81.708600 + ], + [ + 62.805545, + 81.714709 + ], + [ + 62.788609, + 81.719709 + ], + [ + 62.755273, + 81.720827 + ], + [ + 62.716936, + 81.720264 + ], + [ 
+ 62.680545, + 81.718873 + ], + [ + 62.388882, + 81.707491 + ], + [ + 62.247491, + 81.699709 + ], + [ + 60.834577, + 81.763704 + ], + [ + 59.421664, + 81.827209 + ], + [ + 59.256664, + 81.846936 + ], + [ + 59.208327, + 81.850536 + ], + [ + 59.164991, + 81.851927 + ], + [ + 59.087491, + 81.850818 + ], + [ + 58.700000, + 81.844145 + ], + [ + 58.627491, + 81.840818 + ], + [ + 58.115827, + 81.816936 + ], + [ + 58.043609, + 81.813309 + ], + [ + 57.976382, + 81.807755 + ], + [ + 57.947773, + 81.803036 + ], + [ + 56.908305, + 81.754937 + ], + [ + 55.868837, + 81.706558 + ], + [ + 54.829370, + 81.657896 + ], + [ + 53.789902, + 81.608951 + ], + [ + 52.750434, + 81.559721 + ], + [ + 51.710967, + 81.510205 + ], + [ + 50.671499, + 81.460400 + ], + [ + 49.632031, + 81.410305 + ], + [ + 48.566984, + 81.137230 + ], + [ + 47.501936, + 80.855545 + ], + [ + 47.464718, + 80.854709 + ], + [ + 47.198600, + 80.840273 + ], + [ + 47.112773, + 80.830555 + ], + [ + 47.091664, + 80.825000 + ], + [ + 47.101664, + 80.819718 + ], + [ + 47.130273, + 80.815809 + ], + [ + 47.170827, + 80.812764 + ], + [ + 47.193318, + 80.808591 + ], + [ + 47.203045, + 80.803036 + ], + [ + 47.194709, + 80.796100 + ], + [ + 47.026655, + 80.756945 + ], + [ + 46.999436, + 80.753327 + ], + [ + 46.833882, + 80.744136 + ], + [ + 46.800273, + 80.742482 + ], + [ + 46.763327, + 80.741927 + ], + [ + 46.685545, + 80.746645 + ], + [ + 46.537773, + 80.743864 + ], + [ + 46.436936, + 80.739155 + ], + [ + 46.402491, + 80.734155 + ], + [ + 46.429436, + 80.729427 + ], + [ + 46.517218, + 80.721927 + ], + [ + 46.385827, + 80.700545 + ], + [ + 46.358891, + 80.696927 + ], + [ + 46.298609, + 80.691655 + ], + [ + 45.987500, + 80.668045 + ], + [ + 45.954164, + 80.666382 + ], + [ + 45.733882, + 80.664427 + ], + [ + 45.534436, + 80.665545 + ], + [ + 45.464718, + 80.663036 + ], + [ + 45.227482, + 80.651091 + ], + [ + 45.161100, + 80.647491 + ], + [ + 45.000000, + 80.632673 + ], + [ + 44.880545, + 80.621091 + ], + [ + 44.860000, + 80.613455 + ], + [ + 43.839773, + 80.577079 + ], + [ + 42.819545, + 80.540702 + ], + [ + 41.799318, + 80.504326 + ], + [ + 40.779091, + 80.467950 + ], + [ + 39.758864, + 80.431574 + ], + [ + 38.738636, + 80.395198 + ], + [ + 37.718409, + 80.358822 + ], + [ + 36.698182, + 80.322446 + ], + [ + 35.677955, + 80.286070 + ], + [ + 34.657727, + 80.249694 + ], + [ + 33.637500, + 80.213318 + ], + [ + 33.624991, + 80.217755 + ], + [ + 33.602218, + 80.221373 + ], + [ + 33.523882, + 80.231091 + ], + [ + 33.424436, + 80.238873 + ], + [ + 33.386382, + 80.241091 + ], + [ + 33.317218, + 80.243045 + ], + [ + 33.286664, + 80.242482 + ], + [ + 33.261109, + 80.240264 + ], + [ + 33.030818, + 80.214427 + ], + [ + 32.893327, + 80.196364 + ], + [ + 32.751109, + 80.190536 + ], + [ + 32.591100, + 80.179982 + ], + [ + 32.330827, + 80.160536 + ], + [ + 32.121655, + 80.145264 + ], + [ + 31.791945, + 80.128036 + ], + [ + 31.493609, + 80.110809 + ], + [ + 31.476664, + 80.105818 + ], + [ + 31.449436, + 80.085818 + ], + [ + 30.395964, + 80.043113 + ], + [ + 29.342491, + 80.000225 + ], + [ + 28.289018, + 79.957154 + ], + [ + 27.235545, + 79.913900 + ], + [ + 27.165555, + 79.939700 + ], + [ + 27.121382, + 79.958327 + ], + [ + 27.101109, + 79.967482 + ], + [ + 27.145000, + 80.004436 + ], + [ + 27.167773, + 80.021927 + ], + [ + 27.229300, + 80.096518 + ], + [ + 27.182218, + 80.106936 + ], + [ + 27.146109, + 80.107755 + ], + [ + 26.977491, + 80.122755 + ], + [ + 26.908473, + 80.146800 + ], + [ + 26.800000, + 80.172209 + ], + [ + 26.637218, + 80.183318 + ], + [ + 26.598055, + 
80.184709 + ], + [ + 26.242491, + 80.186373 + ], + [ + 26.081664, + 80.185809 + ], + [ + 25.893891, + 80.172764 + ], + [ + 25.863882, + 80.171645 + ], + [ + 25.725455, + 80.176227 + ], + [ + 25.698882, + 80.215818 + ], + [ + 25.543609, + 80.234418 + ], + [ + 25.508891, + 80.236373 + ], + [ + 25.468464, + 80.233600 + ], + [ + 25.450273, + 80.224991 + ], + [ + 25.262082, + 80.225682 + ], + [ + 25.228609, + 80.250545 + ], + [ + 25.255273, + 80.255827 + ], + [ + 25.309164, + 80.259718 + ], + [ + 25.345000, + 80.270200 + ], + [ + 25.289436, + 80.274991 + ], + [ + 25.183327, + 80.268600 + ], + [ + 25.105827, + 80.262209 + ], + [ + 24.982218, + 80.254991 + ], + [ + 24.810000, + 80.247755 + ], + [ + 24.779718, + 80.246645 + ], + [ + 24.801109, + 80.258609 + ], + [ + 24.848055, + 80.276927 + ], + [ + 24.888327, + 80.320973 + ], + [ + 24.860000, + 80.338318 + ], + [ + 24.836382, + 80.350818 + ], + [ + 24.814164, + 80.349427 + ], + [ + 24.786664, + 80.343318 + ], + [ + 24.765973, + 80.332073 + ], + [ + 24.750555, + 80.308591 + ], + [ + 24.732082, + 80.291645 + ], + [ + 24.555827, + 80.256382 + ], + [ + 24.532500, + 80.253600 + ], + [ + 24.500555, + 80.254718 + ], + [ + 24.467218, + 80.264018 + ], + [ + 24.476664, + 80.296100 + ], + [ + 24.551936, + 80.306636 + ], + [ + 24.600000, + 80.312900 + ], + [ + 24.560273, + 80.333600 + ], + [ + 24.527218, + 80.341373 + ], + [ + 24.506109, + 80.344982 + ], + [ + 24.347773, + 80.367755 + ], + [ + 24.307500, + 80.368864 + ], + [ + 24.273609, + 80.368591 + ], + [ + 24.213055, + 80.366091 + ], + [ + 24.189436, + 80.363309 + ], + [ + 24.149164, + 80.352764 + ], + [ + 24.182218, + 80.336382 + ], + [ + 24.203327, + 80.333055 + ], + [ + 24.229436, + 80.330273 + ], + [ + 24.290836, + 80.325545 + ], + [ + 24.361109, + 80.325545 + ], + [ + 24.397909, + 80.317209 + ], + [ + 24.365000, + 80.289982 + ], + [ + 24.338055, + 80.283873 + ], + [ + 24.305827, + 80.284718 + ], + [ + 24.216936, + 80.294436 + ], + [ + 23.979436, + 80.308318 + ], + [ + 23.939436, + 80.309418 + ], + [ + 23.906664, + 80.304700 + ], + [ + 23.951664, + 80.293045 + ], + [ + 23.984991, + 80.285264 + ], + [ + 24.022564, + 80.271518 + ], + [ + 23.993882, + 80.269718 + ], + [ + 23.866664, + 80.278045 + ], + [ + 23.844164, + 80.280273 + ], + [ + 23.815555, + 80.288591 + ], + [ + 23.792500, + 80.300818 + ], + [ + 23.761391, + 80.304700 + ], + [ + 23.738055, + 80.297764 + ], + [ + 23.718055, + 80.256655 + ], + [ + 23.728745, + 80.229982 + ], + [ + 23.751391, + 80.207218 + ], + [ + 23.619718, + 80.142773 + ], + [ + 23.596664, + 80.135818 + ], + [ + 23.548609, + 80.129836 + ], + [ + 23.489164, + 80.154709 + ], + [ + 23.467355, + 80.172345 + ], + [ + 23.492073, + 80.190945 + ], + [ + 23.472909, + 80.206236 + ], + [ + 23.429436, + 80.207764 + ], + [ + 23.248055, + 80.192473 + ], + [ + 23.225273, + 80.189700 + ], + [ + 23.090273, + 80.164564 + ], + [ + 23.118327, + 80.153045 + ], + [ + 23.175827, + 80.136382 + ], + [ + 23.220482, + 80.117482 + ], + [ + 23.183055, + 80.113309 + ], + [ + 23.152773, + 80.115264 + ], + [ + 23.101391, + 80.120818 + ], + [ + 23.071664, + 80.126091 + ], + [ + 23.004164, + 80.156791 + ], + [ + 23.047500, + 80.244982 + ], + [ + 23.086936, + 80.252491 + ], + [ + 23.139718, + 80.256655 + ], + [ + 23.169718, + 80.257764 + ], + [ + 23.196109, + 80.259991 + ], + [ + 23.242491, + 80.265545 + ], + [ + 23.301800, + 80.280682 + ], + [ + 23.339718, + 80.342418 + ], + [ + 23.308891, + 80.349155 + ], + [ + 23.268609, + 80.350264 + ], + [ + 23.208045, + 80.360809 + ], + [ + 23.127082, + 80.382764 + ], + [ 
+ 23.144718, + 80.392491 + ], + [ + 23.173045, + 80.398327 + ], + [ + 23.203327, + 80.399718 + ], + [ + 23.243882, + 80.398609 + ], + [ + 23.277500, + 80.399155 + ], + [ + 23.309445, + 80.404155 + ], + [ + 23.355827, + 80.426518 + ], + [ + 23.315000, + 80.444982 + ], + [ + 23.284718, + 80.450273 + ], + [ + 23.258055, + 80.453045 + ], + [ + 23.127773, + 80.461382 + ], + [ + 22.946936, + 80.476100 + ], + [ + 22.886936, + 80.490264 + ], + [ + 22.833882, + 80.436918 + ], + [ + 22.832500, + 80.407900 + ], + [ + 22.749164, + 80.324155 + ], + [ + 22.716936, + 80.325000 + ], + [ + 22.695000, + 80.328327 + ], + [ + 22.619236, + 80.348591 + ], + [ + 22.632773, + 80.369982 + ], + [ + 22.658055, + 80.384564 + ], + [ + 22.678400, + 80.412073 + ], + [ + 22.612218, + 80.426655 + ], + [ + 22.498055, + 80.429155 + ], + [ + 22.408327, + 80.426655 + ], + [ + 22.384991, + 80.423600 + ], + [ + 22.361109, + 80.410264 + ], + [ + 22.335764, + 80.358736 + ], + [ + 22.386664, + 80.328873 + ], + [ + 22.408609, + 80.325545 + ], + [ + 22.503191, + 80.319909 + ], + [ + 22.541391, + 80.314991 + ], + [ + 22.568536, + 80.296373 + ], + [ + 22.500000, + 80.275164 + ], + [ + 22.452355, + 80.261518 + ], + [ + 22.419164, + 80.169709 + ], + [ + 22.361664, + 80.037491 + ], + [ + 22.328473, + 80.033736 + ], + [ + 22.363882, + 80.001391 + ], + [ + 22.355000, + 79.995255 + ], + [ + 22.287500, + 79.981091 + ], + [ + 22.257500, + 79.978318 + ], + [ + 22.226382, + 79.979155 + ], + [ + 22.198882, + 79.984700 + ], + [ + 22.192636, + 80.019009 + ], + [ + 22.128055, + 80.075545 + ], + [ + 22.054164, + 80.108600 + ], + [ + 21.856109, + 80.143600 + ], + [ + 21.829718, + 80.146100 + ], + [ + 21.798055, + 80.146945 + ], + [ + 21.736664, + 80.140682 + ], + [ + 21.666936, + 80.112491 + ], + [ + 21.637500, + 80.111100 + ], + [ + 21.607982, + 80.121300 + ], + [ + 21.658882, + 80.144436 + ], + [ + 21.686664, + 80.154164 + ], + [ + 21.729718, + 80.168591 + ], + [ + 21.761109, + 80.177473 + ], + [ + 21.884300, + 80.202764 + ], + [ + 21.898327, + 80.217755 + ], + [ + 21.870409, + 80.258464 + ], + [ + 21.838327, + 80.271100 + ], + [ + 21.802773, + 80.272491 + ], + [ + 21.697218, + 80.273609 + ], + [ + 21.483327, + 80.266391 + ], + [ + 21.300555, + 80.239973 + ], + [ + 21.125273, + 80.216664 + ], + [ + 21.095827, + 80.215273 + ], + [ + 20.943327, + 80.212200 + ], + [ + 20.881936, + 80.211382 + ], + [ + 20.853327, + 80.211109 + ], + [ + 20.812427, + 80.219700 + ], + [ + 20.745691, + 80.267773 + ], + [ + 20.783336, + 80.286927 + ], + [ + 20.835136, + 80.306018 + ], + [ + 20.812773, + 80.311918 + ], + [ + 20.779445, + 80.311373 + ], + [ + 20.727218, + 80.306927 + ], + [ + 20.674436, + 80.298455 + ], + [ + 20.647773, + 80.292482 + ], + [ + 20.615555, + 80.293045 + ], + [ + 20.572500, + 80.302200 + ], + [ + 20.527773, + 80.320827 + ], + [ + 20.432027, + 80.397309 + ], + [ + 20.415827, + 80.413882 + ], + [ + 20.365827, + 80.419709 + ], + [ + 20.329445, + 80.421100 + ], + [ + 20.237773, + 80.419709 + ], + [ + 20.211382, + 80.417482 + ], + [ + 20.179436, + 80.412618 + ], + [ + 20.100555, + 80.405818 + ], + [ + 20.035136, + 80.463600 + ], + [ + 20.007500, + 80.469436 + ], + [ + 19.701109, + 80.499709 + ], + [ + 19.668882, + 80.501664 + ], + [ + 19.642491, + 80.499418 + ], + [ + 19.481936, + 80.462345 + ], + [ + 19.463327, + 80.454709 + ], + [ + 19.458745, + 80.420682 + ], + [ + 19.477355, + 80.394991 + ], + [ + 19.512218, + 80.387209 + ], + [ + 19.548609, + 80.386109 + ], + [ + 19.581936, + 80.386936 + ], + [ + 19.689991, + 80.395264 + ], + [ + 19.723327, + 
80.396382 + ], + [ + 19.913745, + 80.376645 + ], + [ + 19.858609, + 80.339709 + ], + [ + 19.812773, + 80.275818 + ], + [ + 19.807218, + 80.231091 + ], + [ + 19.840827, + 80.220964 + ], + [ + 19.808609, + 80.212491 + ], + [ + 19.687500, + 80.213045 + ], + [ + 19.655273, + 80.213609 + ], + [ + 19.623745, + 80.218600 + ], + [ + 19.600827, + 80.224700 + ], + [ + 19.433609, + 80.286373 + ], + [ + 19.416518, + 80.299709 + ], + [ + 19.385555, + 80.314145 + ], + [ + 19.330273, + 80.325273 + ], + [ + 19.261664, + 80.334718 + ], + [ + 19.211382, + 80.340273 + ], + [ + 19.156664, + 80.345264 + ], + [ + 19.097218, + 80.349427 + ], + [ + 19.060827, + 80.350536 + ], + [ + 19.023891, + 80.350536 + ], + [ + 18.980000, + 80.336655 + ], + [ + 19.099164, + 80.258045 + ], + [ + 19.214582, + 80.196918 + ], + [ + 19.310555, + 80.174427 + ], + [ + 19.368882, + 80.169982 + ], + [ + 19.404991, + 80.170255 + ], + [ + 19.480545, + 80.168318 + ], + [ + 19.552082, + 80.162618 + ], + [ + 19.579855, + 80.149291 + ], + [ + 19.387500, + 80.107209 + ], + [ + 19.340555, + 80.086382 + ], + [ + 19.318882, + 80.083327 + ], + [ + 19.290000, + 80.081664 + ], + [ + 19.263336, + 80.084155 + ], + [ + 19.234164, + 80.089573 + ], + [ + 19.212636, + 80.102200 + ], + [ + 19.191245, + 80.114982 + ], + [ + 18.904164, + 80.187618 + ], + [ + 18.760973, + 80.187764 + ], + [ + 18.717355, + 80.159291 + ], + [ + 18.674436, + 80.158036 + ], + [ + 18.634027, + 80.161800 + ], + [ + 18.610555, + 80.167755 + ], + [ + 18.594164, + 80.184709 + ], + [ + 18.443464, + 80.181091 + ], + [ + 18.415555, + 80.172482 + ], + [ + 18.254718, + 80.173036 + ], + [ + 18.223327, + 80.174991 + ], + [ + 18.203327, + 80.184709 + ], + [ + 18.167500, + 80.185809 + ], + [ + 18.038327, + 80.185255 + ], + [ + 17.877909, + 80.155536 + ], + [ + 17.790827, + 80.127064 + ], + [ + 17.938327, + 80.125536 + ], + [ + 18.007500, + 80.128036 + ], + [ + 18.062773, + 80.126082 + ], + [ + 18.214822, + 80.100459 + ], + [ + 16.547642, + 80.034096 + ], + [ + 16.531109, + 80.042345 + ], + [ + 16.335000, + 80.060527 + ], + [ + 16.307773, + 80.062764 + ], + [ + 14.412899, + 80.208520 + ], + [ + 13.411241, + 80.285569 + ], + [ + 12.409584, + 80.362618 + ], + [ + 11.407926, + 80.439666 + ], + [ + 10.406269, + 80.516715 + ], + [ + 9.404611, + 80.593764 + ], + [ + 8.402954, + 80.670813 + ], + [ + 7.401296, + 80.747862 + ], + [ + 6.399639, + 80.824910 + ], + [ + 5.397981, + 80.901959 + ], + [ + 4.396324, + 80.979008 + ], + [ + 3.394666, + 81.056057 + ], + [ + 2.393009, + 81.133106 + ], + [ + 1.391352, + 81.210155 + ], + [ + 0.389694, + 81.287203 + ], + [ + -0.611963, + 81.364252 + ], + [ + -1.613621, + 81.441301 + ], + [ + -2.615278, + 81.518350 + ], + [ + -3.616936, + 81.595399 + ], + [ + -4.618593, + 81.672447 + ], + [ + -5.620251, + 81.749496 + ], + [ + -6.621908, + 81.826545 + ], + [ + -7.623566, + 81.903594 + ], + [ + -8.625223, + 81.980643 + ], + [ + -9.626881, + 82.057692 + ], + [ + -10.628538, + 82.134740 + ], + [ + -11.630196, + 82.211789 + ], + [ + -12.631853, + 82.288838 + ], + [ + -13.633510, + 82.365887 + ], + [ + -14.635168, + 82.442936 + ], + [ + -15.636825, + 82.519985 + ], + [ + -16.638483, + 82.597033 + ], + [ + -17.640140, + 82.674082 + ], + [ + -18.641798, + 82.751131 + ], + [ + -19.643455, + 82.828180 + ], + [ + -20.645113, + 82.905229 + ], + [ + -21.646770, + 82.982277 + ], + [ + -22.648428, + 83.059326 + ], + [ + -23.650085, + 83.136375 + ], + [ + -24.651743, + 83.213424 + ], + [ + -25.653400, + 83.290473 + ], + [ + -25.670000, + 83.298873 + ], + [ + -25.685000, + 83.303591 
+ ], + [ + -25.775555, + 83.325000 + ], + [ + -25.800282, + 83.330555 + ], + [ + -26.100000, + 83.369709 + ], + [ + -26.199718, + 83.379155 + ], + [ + -26.257782, + 83.383882 + ], + [ + -26.340555, + 83.388045 + ], + [ + -26.751664, + 83.421100 + ], + [ + -27.176664, + 83.450000 + ], + [ + -27.432827, + 83.466618 + ], + [ + -27.751109, + 83.477764 + ], + [ + -27.860836, + 83.481091 + ], + [ + -27.913891, + 83.481655 + ], + [ + -27.961945, + 83.479709 + ], + [ + -28.004445, + 83.474700 + ], + [ + -28.050555, + 83.471645 + ], + [ + -28.095000, + 83.469982 + ], + [ + -28.197218, + 83.466936 + ], + [ + -28.391945, + 83.462773 + ], + [ + -28.440282, + 83.456509 + ], + [ + -28.415836, + 83.448318 + ], + [ + -28.367218, + 83.443036 + ], + [ + -28.204445, + 83.434418 + ], + [ + -28.149727, + 83.431509 + ], + [ + -28.317500, + 83.419436 + ], + [ + -28.559718, + 83.416091 + ], + [ + -28.602500, + 83.416091 + ], + [ + -28.664718, + 83.423873 + ], + [ + -28.719164, + 83.431655 + ], + [ + -28.896109, + 83.462200 + ], + [ + -28.904682, + 83.471036 + ], + [ + -28.870000, + 83.477200 + ], + [ + -28.705273, + 83.474991 + ], + [ + -28.653891, + 83.475264 + ], + [ + -28.560000, + 83.478045 + ], + [ + -28.529164, + 83.482209 + ], + [ + -28.513336, + 83.490955 + ], + [ + -28.524173, + 83.499709 + ], + [ + -28.543336, + 83.504991 + ], + [ + -28.567500, + 83.508327 + ], + [ + -28.613055, + 83.511655 + ], + [ + -28.773336, + 83.513609 + ], + [ + -28.826945, + 83.514164 + ], + [ + -28.876391, + 83.512773 + ], + [ + -29.021664, + 83.507218 + ], + [ + -29.057500, + 83.503327 + ], + [ + -29.026945, + 83.488309 + ], + [ + -29.056664, + 83.481655 + ], + [ + -29.100000, + 83.477482 + ], + [ + -29.157218, + 83.479982 + ], + [ + -29.179445, + 83.482209 + ], + [ + -29.235555, + 83.492482 + ], + [ + -29.257227, + 83.500955 + ], + [ + -29.245555, + 83.509991 + ], + [ + -29.214582, + 83.518191 + ], + [ + -29.233327, + 83.525273 + ], + [ + -29.254718, + 83.529164 + ], + [ + -29.415555, + 83.541091 + ], + [ + -29.699445, + 83.566936 + ], + [ + -29.796673, + 83.575000 + ], + [ + -29.856945, + 83.578600 + ], + [ + -30.215000, + 83.596100 + ], + [ + -30.331945, + 83.600818 + ], + [ + -30.388336, + 83.602200 + ], + [ + -30.442773, + 83.602482 + ], + [ + -30.597218, + 83.600264 + ], + [ + -30.636391, + 83.598036 + ], + [ + -30.677773, + 83.592755 + ], + [ + -30.691945, + 83.587491 + ], + [ + -30.708336, + 83.583055 + ], + [ + -30.751391, + 83.578873 + ], + [ + -30.844445, + 83.573045 + ], + [ + -30.946945, + 83.569445 + ], + [ + -31.154164, + 83.567218 + ], + [ + -31.261673, + 83.569445 + ], + [ + -31.435273, + 83.575273 + ], + [ + -31.488055, + 83.578600 + ], + [ + -31.653327, + 83.591373 + ], + [ + -31.700555, + 83.595827 + ], + [ + -31.871945, + 83.596373 + ], + [ + -31.960827, + 83.591091 + ], + [ + -32.164445, + 83.578327 + ], + [ + -32.187500, + 83.575000 + ], + [ + -32.229718, + 83.570545 + ], + [ + -32.276945, + 83.568055 + ], + [ + -32.300282, + 83.570264 + ], + [ + -32.301391, + 83.589709 + ], + [ + -32.274718, + 83.596645 + ], + [ + -32.229718, + 83.599991 + ], + [ + -32.206945, + 83.606791 + ], + [ + -32.233609, + 83.611927 + ], + [ + -32.295282, + 83.614991 + ], + [ + -32.522227, + 83.622482 + ], + [ + -32.579173, + 83.623600 + ], + [ + -32.908055, + 83.620255 + ], + [ + -33.146109, + 83.616655 + ], + [ + -33.436664, + 83.610809 + ], + [ + -33.688609, + 83.604155 + ], + [ + -33.784445, + 83.599718 + ], + [ + -33.899727, + 83.592482 + ], + [ + -33.943327, + 83.588882 + ], + [ + -34.028609, + 83.579436 + ], + [ + 
-34.070000, + 83.570264 + ], + [ + -34.081527, + 83.564009 + ], + [ + -34.077227, + 83.550809 + ], + [ + -34.035282, + 83.534718 + ], + [ + -34.014164, + 83.528873 + ], + [ + -33.940555, + 83.513882 + ], + [ + -33.815555, + 83.501391 + ], + [ + -33.784173, + 83.496373 + ], + [ + -33.760836, + 83.491364 + ], + [ + -33.747082, + 83.483182 + ], + [ + -33.760282, + 83.475264 + ], + [ + -33.785282, + 83.468318 + ], + [ + -33.837218, + 83.456645 + ], + [ + -33.858891, + 83.453600 + ], + [ + -33.882773, + 83.452482 + ], + [ + -33.917218, + 83.455264 + ], + [ + -34.195000, + 83.521927 + ], + [ + -34.282500, + 83.548873 + ], + [ + -34.298336, + 83.554427 + ], + [ + -34.305827, + 83.566373 + ], + [ + -34.307155, + 83.584018 + ], + [ + -34.326391, + 83.593318 + ], + [ + -34.342500, + 83.595536 + ], + [ + -34.364164, + 83.596936 + ], + [ + -34.410555, + 83.594145 + ], + [ + -34.440555, + 83.587491 + ], + [ + -34.455836, + 83.580418 + ], + [ + -34.457500, + 83.567218 + ], + [ + -34.461945, + 83.551227 + ], + [ + -34.483745, + 83.541791 + ], + [ + -34.512218, + 83.539982 + ], + [ + -34.541673, + 83.542482 + ], + [ + -34.572918, + 83.549855 + ], + [ + -34.583609, + 83.565264 + ], + [ + -34.636118, + 83.589427 + ], + [ + -34.649991, + 83.593873 + ], + [ + -34.682500, + 83.598873 + ], + [ + -34.726100, + 83.601382 + ], + [ + -35.754793, + 83.587455 + ], + [ + -36.783487, + 83.573498 + ], + [ + -37.812180, + 83.559510 + ], + [ + -38.840873, + 83.545492 + ], + [ + -39.869567, + 83.531444 + ], + [ + -40.898260, + 83.517366 + ], + [ + -41.926953, + 83.503257 + ], + [ + -42.955647, + 83.489117 + ], + [ + -43.984340, + 83.474947 + ], + [ + -45.013033, + 83.460746 + ], + [ + -46.041726, + 83.446514 + ], + [ + -47.070420, + 83.432251 + ], + [ + -48.099113, + 83.417958 + ], + [ + -49.127806, + 83.403633 + ], + [ + -50.156500, + 83.389278 + ], + [ + -51.185193, + 83.374891 + ], + [ + -52.213886, + 83.360473 + ], + [ + -53.242580, + 83.346024 + ], + [ + -54.271273, + 83.331544 + ], + [ + -55.299966, + 83.317032 + ], + [ + -56.328660, + 83.302489 + ], + [ + -57.357353, + 83.287914 + ], + [ + -58.386046, + 83.273308 + ], + [ + -59.414740, + 83.258670 + ], + [ + -60.443433, + 83.244001 + ], + [ + -61.472126, + 83.229299 + ], + [ + -62.500820, + 83.214566 + ], + [ + -63.529513, + 83.199801 + ], + [ + -64.558206, + 83.185004 + ], + [ + -65.586899, + 83.170175 + ], + [ + -66.615593, + 83.155313 + ], + [ + -67.644286, + 83.140420 + ], + [ + -68.672979, + 83.125494 + ], + [ + -69.701673, + 83.110536 + ], + [ + -69.748891, + 83.111927 + ], + [ + -69.812209, + 83.112200 + ], + [ + -70.001400, + 83.107755 + ], + [ + -70.111936, + 83.109418 + ], + [ + -70.160000, + 83.111373 + ], + [ + -70.260009, + 83.113873 + ], + [ + -70.373891, + 83.113309 + ], + [ + -70.470000, + 83.107482 + ], + [ + -70.585282, + 83.103318 + ], + [ + -70.694155, + 83.103591 + ], + [ + -70.887218, + 83.098036 + ], + [ + -71.125273, + 83.087491 + ], + [ + -71.425000, + 83.029436 + ], + [ + -71.481282, + 83.006864 + ], + [ + -71.306382, + 82.982209 + ], + [ + -71.080836, + 82.937482 + ], + [ + -70.961945, + 82.918591 + ], + [ + -70.904173, + 82.908036 + ], + [ + -70.842782, + 82.888318 + ], + [ + -70.871382, + 82.881091 + ], + [ + -70.952227, + 82.883609 + ], + [ + -71.018336, + 82.891936 + ], + [ + -71.084164, + 82.900545 + ], + [ + -71.144164, + 82.908327 + ], + [ + -71.219727, + 82.914991 + ], + [ + -71.336673, + 82.914700 + ], + [ + -71.493609, + 82.932209 + ], + [ + -71.567227, + 82.941082 + ], + [ + -71.789718, + 83.010827 + ], + [ + -71.775009, + 
83.032209 + ], + [ + -71.750000, + 83.043045 + ], + [ + -71.696382, + 83.057755 + ], + [ + -71.654445, + 83.068882 + ], + [ + -71.589318, + 83.088182 + ], + [ + -71.611664, + 83.096100 + ], + [ + -71.712782, + 83.098873 + ], + [ + -71.831682, + 83.097764 + ], + [ + -72.005573, + 83.099155 + ], + [ + -72.111936, + 83.101091 + ], + [ + -72.226945, + 83.101382 + ], + [ + -72.336400, + 83.097764 + ], + [ + -72.365827, + 83.094145 + ], + [ + -72.400700, + 83.086518 + ], + [ + -72.424164, + 83.079164 + ], + [ + -72.477491, + 83.076664 + ], + [ + -72.523900, + 83.076936 + ], + [ + -72.566464, + 83.088245 + ], + [ + -72.599727, + 83.096936 + ], + [ + -72.650555, + 83.096373 + ], + [ + -72.927491, + 83.067491 + ], + [ + -72.948609, + 83.055255 + ], + [ + -73.033891, + 83.036655 + ], + [ + -73.261945, + 83.007764 + ], + [ + -73.626800, + 82.938864 + ], + [ + -73.640345, + 82.923800 + ], + [ + -73.607500, + 82.913036 + ], + [ + -73.577227, + 82.908036 + ], + [ + -73.495000, + 82.902482 + ], + [ + -73.460827, + 82.898609 + ], + [ + -73.425418, + 82.892073 + ], + [ + -73.401400, + 82.874982 + ], + [ + -73.257509, + 82.825818 + ], + [ + -73.211400, + 82.813873 + ], + [ + -73.027218, + 82.786927 + ], + [ + -72.983891, + 82.783873 + ], + [ + -72.912218, + 82.776655 + ], + [ + -72.716664, + 82.755555 + ], + [ + -72.648900, + 82.746645 + ], + [ + -72.500691, + 82.721373 + ], + [ + -72.599027, + 82.696645 + ], + [ + -72.633900, + 82.694427 + ], + [ + -72.672227, + 82.698591 + ], + [ + -72.700836, + 82.703327 + ], + [ + -72.750000, + 82.714709 + ], + [ + -72.835827, + 82.728591 + ], + [ + -72.906664, + 82.735809 + ], + [ + -72.949718, + 82.738873 + ], + [ + -73.075009, + 82.745818 + ], + [ + -73.160282, + 82.751391 + ], + [ + -73.247218, + 82.761655 + ], + [ + -73.281955, + 82.766391 + ], + [ + -73.548336, + 82.806091 + ], + [ + -73.607773, + 82.815809 + ], + [ + -73.817782, + 82.852764 + ], + [ + -73.851673, + 82.866655 + ], + [ + -73.879436, + 82.897218 + ], + [ + -74.018064, + 82.956936 + ], + [ + -74.084164, + 82.972491 + ], + [ + -74.172773, + 82.991091 + ], + [ + -74.279173, + 83.009991 + ], + [ + -74.408055, + 83.024700 + ], + [ + -74.435818, + 83.027209 + ], + [ + -74.706664, + 83.041091 + ], + [ + -74.797500, + 83.043591 + ], + [ + -74.956391, + 83.045536 + ], + [ + -75.000000, + 83.043882 + ], + [ + -75.046955, + 83.041655 + ], + [ + -75.313327, + 83.027482 + ], + [ + -75.580836, + 83.038036 + ], + [ + -75.948609, + 83.051927 + ], + [ + -75.979718, + 83.053036 + ], + [ + -76.028609, + 83.054427 + ], + [ + -76.079182, + 83.053591 + ], + [ + -76.113327, + 83.050536 + ], + [ + -76.206664, + 83.036655 + ], + [ + -76.266664, + 83.029164 + ], + [ + -76.360273, + 83.021382 + ], + [ + -76.559436, + 83.011936 + ], + [ + -76.863055, + 83.010818 + ], + [ + -77.135564, + 83.011382 + ], + [ + -77.170555, + 83.015545 + ], + [ + -77.135982, + 83.030400 + ], + [ + -77.183882, + 83.033873 + ], + [ + -77.222782, + 83.030545 + ], + [ + -77.252227, + 83.025273 + ], + [ + -77.276109, + 83.020264 + ], + [ + -77.341945, + 83.005555 + ], + [ + -77.379100, + 82.990127 + ], + [ + -77.344727, + 82.972491 + ], + [ + -77.131673, + 82.939973 + ], + [ + -77.066391, + 82.930818 + ], + [ + -77.025836, + 82.927764 + ], + [ + -76.881945, + 82.913609 + ], + [ + -76.844164, + 82.909145 + ], + [ + -76.752791, + 82.894991 + ], + [ + -76.710827, + 82.885818 + ], + [ + -76.666655, + 82.872482 + ], + [ + -76.629164, + 82.859709 + ], + [ + -76.586400, + 82.838591 + ], + [ + -76.545273, + 82.821109 + ], + [ + -76.525282, + 82.813873 + ], + [ 
+ -76.501682, + 82.807755 + ], + [ + -76.447491, + 82.797482 + ], + [ + -76.375273, + 82.789155 + ], + [ + -76.288609, + 82.784718 + ], + [ + -76.241382, + 82.783600 + ], + [ + -76.186391, + 82.783873 + ], + [ + -75.990555, + 82.784918 + ], + [ + -76.014727, + 82.775818 + ], + [ + -76.056945, + 82.771655 + ], + [ + -76.176391, + 82.767209 + ], + [ + -76.226391, + 82.764436 + ], + [ + -76.269455, + 82.760818 + ], + [ + -76.303945, + 82.744500 + ], + [ + -76.275555, + 82.724427 + ], + [ + -76.256391, + 82.717209 + ], + [ + -76.235827, + 82.712200 + ], + [ + -76.103055, + 82.686100 + ], + [ + -75.807500, + 82.654709 + ], + [ + -75.670545, + 82.642764 + ], + [ + -75.625545, + 82.633045 + ], + [ + -75.557500, + 82.628582 + ], + [ + -75.503618, + 82.628864 + ], + [ + -75.468891, + 82.627764 + ], + [ + -75.434718, + 82.623873 + ], + [ + -75.402927, + 82.616918 + ], + [ + -75.420273, + 82.606936 + ], + [ + -75.451673, + 82.603318 + ], + [ + -75.500836, + 82.600264 + ], + [ + -75.606382, + 82.595827 + ], + [ + -75.648055, + 82.591664 + ], + [ + -75.671391, + 82.586927 + ], + [ + -75.773900, + 82.557209 + ], + [ + -75.802782, + 82.546373 + ], + [ + -75.887218, + 82.522218 + ], + [ + -75.975009, + 82.499709 + ], + [ + -76.037782, + 82.484418 + ], + [ + -76.102782, + 82.470536 + ], + [ + -76.184155, + 82.453873 + ], + [ + -76.233891, + 82.444973 + ], + [ + -76.258136, + 82.469236 + ], + [ + -76.203673, + 82.507282 + ], + [ + -76.038891, + 82.557209 + ], + [ + -75.972782, + 82.571382 + ], + [ + -75.938327, + 82.575818 + ], + [ + -75.918609, + 82.579991 + ], + [ + -75.894309, + 82.590127 + ], + [ + -75.913891, + 82.597491 + ], + [ + -76.058882, + 82.616927 + ], + [ + -76.093336, + 82.620818 + ], + [ + -76.387218, + 82.651382 + ], + [ + -76.538327, + 82.664155 + ], + [ + -76.570555, + 82.666655 + ], + [ + -76.605282, + 82.692482 + ], + [ + -76.644164, + 82.709155 + ], + [ + -76.674436, + 82.721373 + ], + [ + -76.708618, + 82.733045 + ], + [ + -76.766664, + 82.750827 + ], + [ + -76.789173, + 82.756382 + ], + [ + -76.815555, + 82.761109 + ], + [ + -76.851109, + 82.765000 + ], + [ + -76.898345, + 82.766100 + ], + [ + -76.950418, + 82.771236 + ], + [ + -76.966664, + 82.804700 + ], + [ + -77.098609, + 82.855955 + ], + [ + -77.128327, + 82.863309 + ], + [ + -77.319455, + 82.873309 + ], + [ + -77.405273, + 82.878864 + ], + [ + -77.467227, + 82.883882 + ], + [ + -77.528064, + 82.891100 + ], + [ + -77.616655, + 82.902773 + ], + [ + -77.698618, + 82.914291 + ], + [ + -77.768336, + 82.922482 + ], + [ + -77.813045, + 82.924427 + ], + [ + -77.863327, + 82.921373 + ], + [ + -77.950000, + 82.914155 + ], + [ + -77.986664, + 82.909991 + ], + [ + -78.080291, + 82.898327 + ], + [ + -78.108336, + 82.893327 + ], + [ + -78.231873, + 82.856573 + ], + [ + -78.194445, + 82.845827 + ], + [ + -78.128873, + 82.836655 + ], + [ + -78.107909, + 82.828609 + ], + [ + -78.144164, + 82.823318 + ], + [ + -78.175555, + 82.827209 + ], + [ + -78.341673, + 82.850536 + ], + [ + -78.500564, + 82.845536 + ], + [ + -78.550827, + 82.855609 + ], + [ + -78.538609, + 82.876645 + ], + [ + -78.521945, + 82.889164 + ], + [ + -78.507782, + 82.910809 + ], + [ + -78.546109, + 82.926655 + ], + [ + -78.631945, + 82.941364 + ], + [ + -78.671109, + 82.945527 + ], + [ + -78.719727, + 82.946636 + ], + [ + -78.756118, + 82.942473 + ], + [ + -78.780291, + 82.938036 + ], + [ + -78.825291, + 82.928036 + ], + [ + -78.928055, + 82.898609 + ], + [ + -79.069300, + 82.897282 + ], + [ + -79.177491, + 82.951936 + ], + [ + -79.370545, + 82.974155 + ], + [ + -79.414445, + 
82.975264 + ], + [ + -79.458345, + 82.974155 + ], + [ + -79.793336, + 82.957491 + ], + [ + -79.904727, + 82.951100 + ], + [ + -80.095836, + 82.937191 + ], + [ + -80.398055, + 82.899718 + ], + [ + -80.430000, + 82.890827 + ], + [ + -80.393064, + 82.875536 + ], + [ + -80.277218, + 82.850818 + ], + [ + -80.219727, + 82.841664 + ], + [ + -80.194155, + 82.838318 + ], + [ + -80.158336, + 82.835536 + ], + [ + -80.110000, + 82.834718 + ], + [ + -80.006673, + 82.834427 + ], + [ + -79.896118, + 82.835818 + ], + [ + -79.847782, + 82.834991 + ], + [ + -79.677773, + 82.821518 + ], + [ + -79.942491, + 82.811373 + ], + [ + -79.975827, + 82.808591 + ], + [ + -79.996945, + 82.803318 + ], + [ + -79.936391, + 82.772218 + ], + [ + -79.913327, + 82.765273 + ], + [ + -79.886945, + 82.759427 + ], + [ + -79.836945, + 82.750545 + ], + [ + -79.623045, + 82.727764 + ], + [ + -79.403064, + 82.706373 + ], + [ + -79.331682, + 82.699709 + ], + [ + -79.243055, + 82.695255 + ], + [ + -78.931945, + 82.681655 + ], + [ + -78.895009, + 82.680264 + ], + [ + -78.840836, + 82.680818 + ], + [ + -78.576673, + 82.686918 + ], + [ + -78.531955, + 82.684418 + ], + [ + -78.511955, + 82.679009 + ], + [ + -78.565555, + 82.674700 + ], + [ + -78.843609, + 82.664991 + ], + [ + -79.149991, + 82.667755 + ], + [ + -79.384736, + 82.672764 + ], + [ + -79.468336, + 82.677473 + ], + [ + -79.617491, + 82.693036 + ], + [ + -79.684155, + 82.699709 + ], + [ + -79.747500, + 82.704991 + ], + [ + -79.787509, + 82.707764 + ], + [ + -79.829727, + 82.708882 + ], + [ + -79.885836, + 82.708600 + ], + [ + -79.928609, + 82.705555 + ], + [ + -79.972291, + 82.692618 + ], + [ + -79.848618, + 82.663882 + ], + [ + -79.816255, + 82.652073 + ], + [ + -79.861664, + 82.644145 + ], + [ + -79.941664, + 82.649427 + ], + [ + -80.003064, + 82.656373 + ], + [ + -80.070845, + 82.665545 + ], + [ + -80.160282, + 82.681364 + ], + [ + -80.180127, + 82.695609 + ], + [ + -80.139173, + 82.717900 + ], + [ + -80.158336, + 82.727764 + ], + [ + -80.293336, + 82.774427 + ], + [ + -80.318618, + 82.779982 + ], + [ + -80.381100, + 82.788882 + ], + [ + -80.418336, + 82.792209 + ], + [ + -80.500564, + 82.797482 + ], + [ + -80.801936, + 82.812482 + ], + [ + -80.977218, + 82.820264 + ], + [ + -81.022227, + 82.821927 + ], + [ + -81.359727, + 82.827773 + ], + [ + -81.411391, + 82.827773 + ], + [ + -81.473055, + 82.825000 + ], + [ + -81.514173, + 82.821109 + ], + [ + -81.536391, + 82.816664 + ], + [ + -81.564164, + 82.808873 + ], + [ + -81.579173, + 82.792973 + ], + [ + -81.508618, + 82.764709 + ], + [ + -81.450000, + 82.755555 + ], + [ + -81.305827, + 82.733873 + ], + [ + -81.223618, + 82.715818 + ], + [ + -81.124709, + 82.686918 + ], + [ + -81.097500, + 82.672482 + ], + [ + -81.077227, + 82.666927 + ], + [ + -81.049991, + 82.660809 + ], + [ + -80.994445, + 82.650273 + ], + [ + -80.873891, + 82.629700 + ], + [ + -80.599164, + 82.554427 + ], + [ + -80.579873, + 82.544573 + ], + [ + -80.891955, + 82.532764 + ], + [ + -80.949718, + 82.538036 + ], + [ + -80.989436, + 82.547209 + ], + [ + -81.136127, + 82.578045 + ], + [ + -81.300827, + 82.611100 + ], + [ + -81.359727, + 82.620818 + ], + [ + -81.432500, + 82.629155 + ], + [ + -81.543336, + 82.637209 + ], + [ + -81.931382, + 82.663882 + ], + [ + -81.972227, + 82.666382 + ], + [ + -82.060273, + 82.669709 + ], + [ + -82.155000, + 82.671100 + ], + [ + -82.215291, + 82.668591 + ], + [ + -82.255009, + 82.664427 + ], + [ + -82.288055, + 82.659991 + ], + [ + -82.344864, + 82.648036 + ], + [ + -82.376518, + 82.637218 + ], + [ + -82.391882, + 82.616018 + ], + [ 
+ -82.343891, + 82.595264 + ], + [ + -82.321118, + 82.589155 + ], + [ + -82.263900, + 82.576664 + ], + [ + -81.966400, + 82.528873 + ], + [ + -81.927491, + 82.522764 + ], + [ + -81.880555, + 82.517764 + ], + [ + -81.847227, + 82.515545 + ], + [ + -81.751400, + 82.516936 + ], + [ + -81.713336, + 82.515273 + ], + [ + -81.541945, + 82.500545 + ], + [ + -81.670000, + 82.492482 + ], + [ + -82.091673, + 82.501391 + ], + [ + -82.316955, + 82.506945 + ], + [ + -82.406391, + 82.509155 + ], + [ + -82.458891, + 82.508327 + ], + [ + -82.498336, + 82.506382 + ], + [ + -82.530000, + 82.499845 + ], + [ + -82.704309, + 82.422209 + ], + [ + -82.728673, + 82.398391 + ], + [ + -82.711673, + 82.382473 + ], + [ + -82.679991, + 82.370818 + ], + [ + -82.625545, + 82.359145 + ], + [ + -82.513064, + 82.337773 + ], + [ + -82.454727, + 82.328045 + ], + [ + -82.170545, + 82.286655 + ], + [ + -81.887791, + 82.238036 + ], + [ + -81.825564, + 82.226655 + ], + [ + -81.799727, + 82.222764 + ], + [ + -81.423327, + 82.176927 + ], + [ + -81.324718, + 82.164991 + ], + [ + -81.253345, + 82.159718 + ], + [ + -81.171109, + 82.156373 + ], + [ + -81.051391, + 82.154709 + ], + [ + -80.909164, + 82.156645 + ], + [ + -80.871245, + 82.153036 + ], + [ + -80.899736, + 82.146382 + ], + [ + -80.931382, + 82.142209 + ], + [ + -80.956664, + 82.137209 + ], + [ + -80.975555, + 82.125682 + ], + [ + -80.955700, + 82.113318 + ], + [ + -80.922227, + 82.103591 + ], + [ + -80.878327, + 82.094145 + ], + [ + -80.822236, + 82.083882 + ], + [ + -80.791109, + 82.079436 + ], + [ + -80.725555, + 82.071655 + ], + [ + -80.657227, + 82.064700 + ], + [ + -80.624436, + 82.061918 + ], + [ + -80.368609, + 82.041091 + ], + [ + -80.331682, + 82.038591 + ], + [ + -80.213900, + 82.032209 + ], + [ + -79.916400, + 82.023882 + ], + [ + -79.880827, + 82.021927 + ], + [ + -79.853336, + 82.018873 + ], + [ + -79.835009, + 82.010555 + ], + [ + -79.844455, + 81.971373 + ], + [ + -79.670836, + 81.927473 + ], + [ + -79.579727, + 81.913609 + ], + [ + -79.521118, + 81.905545 + ], + [ + -79.489991, + 81.900273 + ], + [ + -79.452227, + 81.889982 + ], + [ + -79.236809, + 81.816082 + ], + [ + -79.492218, + 81.819718 + ], + [ + -79.534436, + 81.820827 + ], + [ + -79.570691, + 81.827073 + ], + [ + -79.588755, + 81.841236 + ], + [ + -79.610000, + 81.851091 + ], + [ + -79.883055, + 81.924700 + ], + [ + -80.035282, + 81.963045 + ], + [ + -80.085009, + 81.973600 + ], + [ + -80.153609, + 81.981373 + ], + [ + -80.225827, + 81.986100 + ], + [ + -80.432500, + 81.997482 + ], + [ + -80.640291, + 82.018327 + ], + [ + -80.868327, + 82.031373 + ], + [ + -81.091109, + 82.059418 + ], + [ + -81.150282, + 82.068882 + ], + [ + -81.249164, + 82.081373 + ], + [ + -81.353055, + 82.091664 + ], + [ + -81.425282, + 82.097764 + ], + [ + -81.608891, + 82.118591 + ], + [ + -81.918064, + 82.154982 + ], + [ + -82.011127, + 82.168591 + ], + [ + -82.101945, + 82.183045 + ], + [ + -82.160282, + 82.193318 + ], + [ + -82.211118, + 82.204709 + ], + [ + -82.263064, + 82.222218 + ], + [ + -82.286664, + 82.229155 + ], + [ + -82.452791, + 82.249418 + ], + [ + -82.508900, + 82.258045 + ], + [ + -82.621655, + 82.278045 + ], + [ + -82.654445, + 82.282209 + ], + [ + -82.693600, + 82.284718 + ], + [ + -82.735545, + 82.286100 + ], + [ + -82.990827, + 82.292482 + ], + [ + -83.025618, + 82.278464 + ], + [ + -83.027791, + 82.235264 + ], + [ + -83.011400, + 82.221645 + ], + [ + -82.987500, + 82.215000 + ], + [ + -82.940282, + 82.203600 + ], + [ + -82.886945, + 82.193864 + ], + [ + -82.860273, + 82.187764 + ], + [ + -82.772227, + 
82.163318 + ], + [ + -82.724309, + 82.146382 + ], + [ + -82.693045, + 82.128718 + ], + [ + -82.680409, + 82.113036 + ], + [ + -82.651945, + 82.100264 + ], + [ + -82.619718, + 82.096100 + ], + [ + -82.584445, + 82.092755 + ], + [ + -82.546391, + 82.090273 + ], + [ + -82.417500, + 82.087200 + ], + [ + -82.243055, + 82.084991 + ], + [ + -82.102491, + 82.085536 + ], + [ + -82.058609, + 82.084718 + ], + [ + -82.020845, + 82.082218 + ], + [ + -81.966109, + 82.071109 + ], + [ + -81.924164, + 82.058873 + ], + [ + -81.885700, + 82.036855 + ], + [ + -81.926100, + 82.034718 + ], + [ + -81.963618, + 82.037200 + ], + [ + -82.055555, + 82.050809 + ], + [ + -82.122218, + 82.058591 + ], + [ + -82.199436, + 82.064145 + ], + [ + -82.284164, + 82.066373 + ], + [ + -82.421664, + 82.066936 + ], + [ + -82.636400, + 82.070545 + ], + [ + -82.674436, + 82.073045 + ], + [ + -82.758055, + 82.076936 + ], + [ + -82.797500, + 82.077773 + ], + [ + -82.888336, + 82.072491 + ], + [ + -82.974164, + 82.064991 + ], + [ + -83.076400, + 82.061918 + ], + [ + -83.123045, + 82.069364 + ], + [ + -83.062500, + 82.080273 + ], + [ + -83.001955, + 82.089155 + ], + [ + -82.965418, + 82.101509 + ], + [ + -82.953064, + 82.119982 + ], + [ + -82.976673, + 82.138318 + ], + [ + -83.000000, + 82.151091 + ], + [ + -83.022782, + 82.159427 + ], + [ + -83.083891, + 82.175809 + ], + [ + -83.130555, + 82.184982 + ], + [ + -83.184155, + 82.194700 + ], + [ + -83.242218, + 82.204164 + ], + [ + -83.308336, + 82.218318 + ], + [ + -83.344455, + 82.227200 + ], + [ + -83.368255, + 82.249782 + ], + [ + -83.364300, + 82.272909 + ], + [ + -83.384736, + 82.282209 + ], + [ + -83.516400, + 82.316936 + ], + [ + -83.606382, + 82.331373 + ], + [ + -83.767500, + 82.353045 + ], + [ + -83.841945, + 82.361373 + ], + [ + -83.876936, + 82.364155 + ], + [ + -83.961400, + 82.368591 + ], + [ + -84.047227, + 82.371373 + ], + [ + -84.095555, + 82.371091 + ], + [ + -84.146955, + 82.369709 + ], + [ + -84.180555, + 82.368045 + ], + [ + -84.228882, + 82.363873 + ], + [ + -84.303327, + 82.355818 + ], + [ + -84.344455, + 82.352764 + ], + [ + -84.384509, + 82.363936 + ], + [ + -84.418336, + 82.381091 + ], + [ + -84.450000, + 82.386109 + ], + [ + -84.482500, + 82.389436 + ], + [ + -84.559718, + 82.394991 + ], + [ + -84.714718, + 82.405818 + ], + [ + -84.888609, + 82.416927 + ], + [ + -84.916655, + 82.420536 + ], + [ + -84.942218, + 82.428873 + ], + [ + -84.895282, + 82.433591 + ], + [ + -84.787782, + 82.434982 + ], + [ + -84.620418, + 82.452618 + ], + [ + -84.641682, + 82.465545 + ], + [ + -84.662782, + 82.468600 + ], + [ + -84.693882, + 82.471373 + ], + [ + -85.003064, + 82.480818 + ], + [ + -85.046955, + 82.481936 + ], + [ + -85.298618, + 82.478045 + ], + [ + -85.502500, + 82.471100 + ], + [ + -85.708618, + 82.463609 + ], + [ + -85.746945, + 82.461382 + ], + [ + -85.794727, + 82.458600 + ], + [ + -85.819736, + 82.454436 + ], + [ + -85.910755, + 82.428936 + ], + [ + -85.866945, + 82.421918 + ], + [ + -85.669445, + 82.409427 + ], + [ + -85.524727, + 82.405400 + ], + [ + -85.501545, + 82.396236 + ], + [ + -85.531682, + 82.369709 + ], + [ + -85.515018, + 82.343318 + ], + [ + -85.485836, + 82.316655 + ], + [ + -85.457227, + 82.307482 + ], + [ + -85.396391, + 82.296936 + ], + [ + -85.364509, + 82.284045 + ], + [ + -85.413891, + 82.276091 + ], + [ + -85.508345, + 82.273036 + ], + [ + -85.557773, + 82.269436 + ], + [ + -85.580564, + 82.264436 + ], + [ + -85.601391, + 82.251791 + ], + [ + -85.620000, + 82.243591 + ], + [ + -85.662218, + 82.239700 + ], + [ + -85.706118, + 82.238036 + ], + [ 
+ -85.753891, + 82.237491 + ], + [ + -85.798891, + 82.237764 + ], + [ + -85.841382, + 82.239155 + ], + [ + -85.934155, + 82.238873 + ], + [ + -85.984436, + 82.237491 + ], + [ + -86.137791, + 82.226927 + ], + [ + -86.181109, + 82.225264 + ], + [ + -86.228882, + 82.224700 + ], + [ + -86.316664, + 82.224700 + ], + [ + -86.520009, + 82.229709 + ], + [ + -86.571673, + 82.230273 + ], + [ + -86.619445, + 82.229709 + ], + [ + -86.669445, + 82.228318 + ], + [ + -86.764173, + 82.221645 + ], + [ + -86.843609, + 82.212491 + ], + [ + -86.868464, + 82.197482 + ], + [ + -86.752227, + 82.141100 + ], + [ + -86.731382, + 82.136382 + ], + [ + -86.706118, + 82.131927 + ], + [ + -86.637509, + 82.124418 + ], + [ + -86.565555, + 82.118864 + ], + [ + -86.485000, + 82.114155 + ], + [ + -86.278882, + 82.107209 + ], + [ + -86.091109, + 82.104427 + ], + [ + -86.062209, + 82.103864 + ], + [ + -85.999727, + 82.094145 + ], + [ + -85.915836, + 82.077482 + ], + [ + -85.851673, + 82.067218 + ], + [ + -85.755845, + 82.058873 + ], + [ + -85.678327, + 82.054427 + ], + [ + -85.405836, + 82.042209 + ], + [ + -85.116945, + 82.033055 + ], + [ + -85.039991, + 82.028591 + ], + [ + -84.932500, + 82.019436 + ], + [ + -84.899736, + 82.015273 + ], + [ + -84.840836, + 82.006100 + ], + [ + -84.815282, + 82.000827 + ], + [ + -84.751682, + 81.984709 + ], + [ + -84.722218, + 81.973591 + ], + [ + -84.613464, + 81.888455 + ], + [ + -84.635282, + 81.886109 + ], + [ + -84.656109, + 81.887773 + ], + [ + -84.688600, + 81.891936 + ], + [ + -84.746855, + 81.909455 + ], + [ + -84.793436, + 81.927782 + ], + [ + -84.817082, + 81.961864 + ], + [ + -84.831682, + 81.979427 + ], + [ + -84.858891, + 81.985264 + ], + [ + -84.889173, + 81.990264 + ], + [ + -84.929173, + 81.993045 + ], + [ + -85.001109, + 81.994136 + ], + [ + -85.056518, + 81.989982 + ], + [ + -85.043055, + 81.970682 + ], + [ + -85.025282, + 81.960818 + ], + [ + -84.994718, + 81.948591 + ], + [ + -84.915009, + 81.918045 + ], + [ + -84.863891, + 81.900273 + ], + [ + -84.836682, + 81.889755 + ], + [ + -84.879436, + 81.887500 + ], + [ + -84.984164, + 81.911100 + ], + [ + -85.018891, + 81.919436 + ], + [ + -85.096391, + 81.945818 + ], + [ + -85.140564, + 81.966091 + ], + [ + -85.162918, + 81.982482 + ], + [ + -85.188327, + 81.992755 + ], + [ + -85.217500, + 81.995527 + ], + [ + -85.258618, + 81.996936 + ], + [ + -85.559436, + 82.001664 + ], + [ + -85.650555, + 81.998318 + ], + [ + -85.693882, + 81.994982 + ], + [ + -85.730209, + 81.986164 + ], + [ + -85.654727, + 81.950818 + ], + [ + -85.566100, + 81.924991 + ], + [ + -85.469455, + 81.899718 + ], + [ + -85.441936, + 81.893873 + ], + [ + -85.391945, + 81.878036 + ], + [ + -85.372773, + 81.861791 + ], + [ + -85.379436, + 81.856936 + ], + [ + -85.422500, + 81.857482 + ], + [ + -85.467227, + 81.867200 + ], + [ + -85.628873, + 81.916091 + ], + [ + -85.731382, + 81.950000 + ], + [ + -85.767500, + 81.961927 + ], + [ + -85.815000, + 81.973873 + ], + [ + -85.914445, + 81.997482 + ], + [ + -85.960555, + 82.007491 + ], + [ + -86.016109, + 82.016664 + ], + [ + -86.169155, + 82.041655 + ], + [ + -86.202791, + 82.045536 + ], + [ + -86.239164, + 82.048600 + ], + [ + -86.278064, + 82.050809 + ], + [ + -86.356382, + 82.053591 + ], + [ + -86.583618, + 82.053864 + ], + [ + -86.791945, + 82.058027 + ], + [ + -86.843336, + 82.057209 + ], + [ + -86.892500, + 82.054155 + ], + [ + -86.931673, + 82.049427 + ], + [ + -86.994309, + 82.038036 + ], + [ + -87.128745, + 81.966100 + ], + [ + -87.098055, + 81.958327 + ], + [ + -87.066100, + 81.954991 + ], + [ + -86.919445, + 
81.942745 + ], + [ + -86.863618, + 81.933591 + ], + [ + -86.834727, + 81.927764 + ], + [ + -86.733400, + 81.899645 + ], + [ + -86.768336, + 81.890273 + ], + [ + -86.804173, + 81.893055 + ], + [ + -86.828891, + 81.897491 + ], + [ + -86.877209, + 81.909427 + ], + [ + -86.939436, + 81.918873 + ], + [ + -87.063327, + 81.934418 + ], + [ + -87.101673, + 81.937764 + ], + [ + -87.169155, + 81.945527 + ], + [ + -87.265836, + 81.958882 + ], + [ + -87.305964, + 81.974918 + ], + [ + -87.258055, + 81.989427 + ], + [ + -87.232773, + 81.993318 + ], + [ + -87.185373, + 82.016691 + ], + [ + -87.230564, + 82.036927 + ], + [ + -87.271664, + 82.047764 + ], + [ + -87.343200, + 82.065264 + ], + [ + -87.402218, + 82.073882 + ], + [ + -87.501400, + 82.084155 + ], + [ + -87.599727, + 82.089155 + ], + [ + -87.641955, + 82.090273 + ], + [ + -87.666400, + 82.089427 + ], + [ + -87.710282, + 82.085400 + ], + [ + -87.911945, + 82.090818 + ], + [ + -88.038327, + 82.103864 + ], + [ + -88.088891, + 82.098945 + ], + [ + -88.113618, + 82.090545 + ], + [ + -88.145009, + 82.086927 + ], + [ + -88.250000, + 82.080827 + ], + [ + -88.296664, + 82.080273 + ], + [ + -88.443055, + 82.075000 + ], + [ + -88.543064, + 82.070545 + ], + [ + -88.589718, + 82.066664 + ], + [ + -88.625545, + 82.062764 + ], + [ + -88.773055, + 82.039427 + ], + [ + -88.963900, + 82.008045 + ], + [ + -89.021118, + 81.998027 + ], + [ + -89.048055, + 81.984155 + ], + [ + -89.011945, + 81.958600 + ], + [ + -88.989582, + 81.948182 + ], + [ + -88.998473, + 81.918318 + ], + [ + -89.033327, + 81.912200 + ], + [ + -89.074718, + 81.911655 + ], + [ + -89.149800, + 81.923364 + ], + [ + -89.249727, + 81.941082 + ], + [ + -89.288891, + 81.943036 + ], + [ + -89.338900, + 81.940264 + ], + [ + -89.371655, + 81.935809 + ], + [ + -89.413264, + 81.921927 + ], + [ + -89.397509, + 81.909427 + ], + [ + -89.367218, + 81.905545 + ], + [ + -89.328064, + 81.902209 + ], + [ + -89.203682, + 81.883282 + ], + [ + -89.235973, + 81.849291 + ], + [ + -89.356382, + 81.811100 + ], + [ + -89.425000, + 81.815264 + ], + [ + -89.461400, + 81.818055 + ], + [ + -89.629991, + 81.856373 + ], + [ + -89.649445, + 81.863309 + ], + [ + -89.679855, + 81.900818 + ], + [ + -89.700836, + 81.915545 + ], + [ + -89.735827, + 81.917482 + ], + [ + -89.783327, + 81.917209 + ], + [ + -89.990827, + 81.905545 + ], + [ + -90.154445, + 81.896655 + ], + [ + -90.245273, + 81.896100 + ], + [ + -90.338055, + 81.893055 + ], + [ + -90.436664, + 81.887500 + ], + [ + -90.565282, + 81.878036 + ], + [ + -90.610000, + 81.873873 + ], + [ + -90.635009, + 81.868864 + ], + [ + -90.689709, + 81.851191 + ], + [ + -90.727491, + 81.841091 + ], + [ + -90.852218, + 81.842482 + ], + [ + -91.001109, + 81.832764 + ], + [ + -91.051664, + 81.828873 + ], + [ + -91.101109, + 81.818882 + ], + [ + -91.144445, + 81.800609 + ], + [ + -91.117764, + 81.784427 + ], + [ + -91.090564, + 81.777209 + ], + [ + -91.050545, + 81.768245 + ], + [ + -91.212509, + 81.759427 + ], + [ + -91.255009, + 81.759155 + ], + [ + -91.287509, + 81.761936 + ], + [ + -91.351391, + 81.770264 + ], + [ + -91.386127, + 81.773882 + ], + [ + -91.485545, + 81.769991 + ], + [ + -91.724727, + 81.714145 + ], + [ + -91.737209, + 81.686918 + ], + [ + -91.770845, + 81.663318 + ], + [ + -91.801100, + 81.658600 + ], + [ + -91.838900, + 81.658600 + ], + [ + -91.867764, + 81.663318 + ], + [ + -91.902218, + 81.666927 + ], + [ + -91.926936, + 81.664991 + ], + [ + -91.953045, + 81.660400 + ], + [ + -91.943600, + 81.628309 + ], + [ + -91.912664, + 81.620181 + ], + [ + -93.517500, + 81.384991 + ], + [ 
+ -93.559091, + 81.373309 + ], + [ + -93.533327, + 81.348600 + ], + [ + -93.487427, + 81.322555 + ], + [ + -93.515291, + 81.310527 + ], + [ + -93.553327, + 81.305545 + ], + [ + -93.602918, + 81.313455 + ], + [ + -93.630418, + 81.324436 + ], + [ + -93.665836, + 81.332764 + ], + [ + -93.694445, + 81.337491 + ], + [ + -93.755009, + 81.344709 + ], + [ + -93.789445, + 81.348036 + ], + [ + -94.035282, + 81.363309 + ], + [ + -94.068073, + 81.363309 + ], + [ + -94.153882, + 81.359709 + ], + [ + -94.200564, + 81.355545 + ], + [ + -94.240827, + 81.350818 + ], + [ + -94.273609, + 81.344018 + ], + [ + -94.378200, + 81.278736 + ], + [ + -94.385700, + 81.254436 + ], + [ + -94.302491, + 81.234982 + ], + [ + -94.282227, + 81.231091 + ], + [ + -94.200564, + 81.221100 + ], + [ + -94.166400, + 81.218045 + ], + [ + -94.031391, + 81.208882 + ], + [ + -93.928882, + 81.203873 + ], + [ + -93.852218, + 81.203045 + ], + [ + -93.728336, + 81.207218 + ], + [ + -93.687500, + 81.210264 + ], + [ + -93.514727, + 81.217755 + ], + [ + -93.419445, + 81.219982 + ], + [ + -93.259736, + 81.212200 + ], + [ + -93.121109, + 81.182755 + ], + [ + -93.094655, + 81.158391 + ], + [ + -93.123318, + 81.115264 + ], + [ + -93.157636, + 81.093318 + ], + [ + -93.255845, + 81.082764 + ], + [ + -93.299991, + 81.079709 + ], + [ + -93.517227, + 81.084427 + ], + [ + -93.689436, + 81.093045 + ], + [ + -93.795273, + 81.099427 + ], + [ + -93.866391, + 81.103045 + ], + [ + -93.907227, + 81.101655 + ], + [ + -93.935273, + 81.098327 + ], + [ + -93.960827, + 81.094145 + ], + [ + -93.989718, + 81.092482 + ], + [ + -94.130282, + 81.092755 + ], + [ + -94.154727, + 81.093873 + ], + [ + -94.205273, + 81.103318 + ], + [ + -94.230836, + 81.110536 + ], + [ + -94.255282, + 81.115536 + ], + [ + -94.278336, + 81.117200 + ], + [ + -94.313045, + 81.115536 + ], + [ + -94.354709, + 81.102827 + ], + [ + -94.328609, + 81.089427 + ], + [ + -94.182500, + 81.068055 + ], + [ + -94.042218, + 81.055545 + ], + [ + -94.013336, + 81.053591 + ], + [ + -93.907218, + 81.039982 + ], + [ + -94.071673, + 81.024991 + ], + [ + -94.143618, + 81.015827 + ], + [ + -94.337364, + 80.976927 + ], + [ + -94.365555, + 80.968873 + ], + [ + -94.408618, + 80.965545 + ], + [ + -94.434155, + 80.965545 + ], + [ + -94.472782, + 80.969145 + ], + [ + -94.499545, + 80.988800 + ], + [ + -94.493882, + 81.017491 + ], + [ + -94.546109, + 81.033327 + ], + [ + -94.572782, + 81.038882 + ], + [ + -94.663055, + 81.048600 + ], + [ + -94.814164, + 81.054155 + ], + [ + -94.943327, + 81.048873 + ], + [ + -95.183064, + 81.019718 + ], + [ + -95.220836, + 81.011382 + ], + [ + -95.248055, + 81.001373 + ], + [ + -95.260982, + 80.974845 + ], + [ + -95.283618, + 80.950000 + ], + [ + -95.311936, + 80.939145 + ], + [ + -95.334164, + 80.934709 + ], + [ + -95.422773, + 80.920818 + ], + [ + -95.472009, + 80.896136 + ], + [ + -95.413327, + 80.885273 + ], + [ + -95.300827, + 80.885273 + ], + [ + -95.170545, + 80.884718 + ], + [ + -95.148755, + 80.882482 + ], + [ + -95.170836, + 80.875809 + ], + [ + -95.212782, + 80.868318 + ], + [ + -95.371382, + 80.853318 + ], + [ + -95.440282, + 80.846100 + ], + [ + -95.500836, + 80.838318 + ], + [ + -95.527218, + 80.819291 + ], + [ + -95.501109, + 80.806927 + ], + [ + -95.475555, + 80.803036 + ], + [ + -95.442764, + 80.799709 + ], + [ + -95.334164, + 80.788882 + ], + [ + -95.282500, + 80.786100 + ], + [ + -95.243055, + 80.787764 + ], + [ + -95.025282, + 80.801655 + ], + [ + -95.036045, + 80.773464 + ], + [ + -94.895555, + 80.747755 + ], + [ + -94.722782, + 80.728591 + ], + [ + -94.694718, + 
80.726655 + ], + [ + -94.659727, + 80.725264 + ], + [ + -94.549436, + 80.724991 + ], + [ + -94.491109, + 80.726927 + ], + [ + -94.449155, + 80.730273 + ], + [ + -94.423045, + 80.734982 + ], + [ + -94.304445, + 80.733873 + ], + [ + -94.140291, + 80.721927 + ], + [ + -94.108336, + 80.718873 + ], + [ + -94.081809, + 80.707491 + ], + [ + -94.117491, + 80.698591 + ], + [ + -94.199718, + 80.693036 + ], + [ + -94.231673, + 80.692200 + ], + [ + -94.331118, + 80.693864 + ], + [ + -94.439164, + 80.697482 + ], + [ + -94.514727, + 80.696364 + ], + [ + -94.553609, + 80.694973 + ], + [ + -94.596955, + 80.690536 + ], + [ + -94.628327, + 80.685809 + ], + [ + -94.664373, + 80.663555 + ], + [ + -94.552636, + 80.602764 + ], + [ + -94.524173, + 80.598327 + ], + [ + -94.484727, + 80.598327 + ], + [ + -94.457509, + 80.600264 + ], + [ + -94.437773, + 80.605545 + ], + [ + -94.308336, + 80.606373 + ], + [ + -94.093609, + 80.593318 + ], + [ + -94.005009, + 80.585264 + ], + [ + -93.973618, + 80.581936 + ], + [ + -93.949155, + 80.578045 + ], + [ + -93.894727, + 80.565809 + ], + [ + -93.810545, + 80.541364 + ], + [ + -93.786664, + 80.528800 + ], + [ + -93.839445, + 80.518600 + ], + [ + -93.866945, + 80.518327 + ], + [ + -93.899173, + 80.519145 + ], + [ + -93.965555, + 80.533664 + ], + [ + -94.010555, + 80.549427 + ], + [ + -94.230836, + 80.556364 + ], + [ + -94.375000, + 80.557209 + ], + [ + -94.554991, + 80.554427 + ], + [ + -94.658618, + 80.555818 + ], + [ + -94.696655, + 80.556927 + ], + [ + -94.762364, + 80.560673 + ], + [ + -94.823627, + 80.569718 + ], + [ + -94.846955, + 80.574709 + ], + [ + -94.902500, + 80.586655 + ], + [ + -94.962782, + 80.599718 + ], + [ + -94.995000, + 80.603045 + ], + [ + -95.030836, + 80.603318 + ], + [ + -95.067227, + 80.601382 + ], + [ + -95.132218, + 80.593873 + ], + [ + -95.172227, + 80.591373 + ], + [ + -95.246655, + 80.589982 + ], + [ + -95.318345, + 80.590818 + ], + [ + -95.423618, + 80.593600 + ], + [ + -95.498045, + 80.592482 + ], + [ + -95.536391, + 80.590818 + ], + [ + -95.671664, + 80.584718 + ], + [ + -95.941664, + 80.586382 + ], + [ + -95.980000, + 80.584718 + ], + [ + -97.228982, + 80.442143 + ], + [ + -98.477964, + 80.297429 + ], + [ + -99.726945, + 80.150545 + ], + [ + -99.759173, + 80.149718 + ], + [ + -99.795273, + 80.147764 + ], + [ + -99.827227, + 80.143600 + ], + [ + -100.023618, + 80.099718 + ], + [ + -100.065555, + 80.089982 + ], + [ + -100.081682, + 80.084427 + ], + [ + -100.193327, + 80.033873 + ], + [ + -101.236385, + 79.896661 + ], + [ + -102.279444, + 79.757580 + ], + [ + -103.322502, + 79.616604 + ], + [ + -104.365560, + 79.473711 + ], + [ + -105.408618, + 79.328873 + ], + [ + -105.439991, + 79.329164 + ], + [ + -106.434104, + 79.217227 + ], + [ + -107.428216, + 79.104131 + ], + [ + -108.422329, + 78.989862 + ], + [ + -109.416442, + 78.874411 + ], + [ + -110.410555, + 78.757764 + ], + [ + -110.430555, + 78.758609 + ], + [ + -110.460282, + 78.757491 + ], + [ + -110.637509, + 78.748600 + ], + [ + -110.791109, + 78.735264 + ], + [ + -110.956127, + 78.718318 + ], + [ + -111.160555, + 78.691655 + ], + [ + -111.363055, + 78.642764 + ], + [ + -111.385618, + 78.616091 + ], + [ + -111.455564, + 78.592755 + ], + [ + -111.572245, + 78.588591 + ], + [ + -111.600827, + 78.585264 + ], + [ + -111.641955, + 78.574155 + ], + [ + -111.677782, + 78.563036 + ], + [ + -111.752500, + 78.550536 + ], + [ + -111.809718, + 78.545255 + ], + [ + -111.853055, + 78.542755 + ], + [ + -111.873045, + 78.544436 + ], + [ + -111.903609, + 78.548873 + ], + [ + -111.988055, + 78.552764 + ], + [ + 
-112.129709, + 78.551927 + ], + [ + -112.238055, + 78.547209 + ], + [ + -112.311664, + 78.539982 + ], + [ + -112.363055, + 78.533327 + ], + [ + -112.607500, + 78.499418 + ], + [ + -112.711673, + 78.484709 + ], + [ + -113.038327, + 78.436918 + ], + [ + -113.119718, + 78.421927 + ], + [ + -113.216109, + 78.385273 + ], + [ + -113.333327, + 78.330818 + ], + [ + -114.303327, + 78.070545 + ], + [ + -114.326945, + 78.071109 + ], + [ + -114.355000, + 78.070545 + ], + [ + -114.400836, + 78.067491 + ], + [ + -114.605827, + 78.030545 + ], + [ + -114.740282, + 78.000000 + ], + [ + -114.777500, + 77.981655 + ], + [ + -114.797782, + 77.975536 + ], + [ + -114.819736, + 77.973036 + ], + [ + -114.930282, + 77.960536 + ], + [ + -115.033891, + 77.962200 + ], + [ + -115.060545, + 77.963882 + ], + [ + -115.090845, + 77.963609 + ], + [ + -115.112700, + 77.957491 + ], + [ + -115.077218, + 77.938582 + ], + [ + -114.848345, + 77.854709 + ] + ] + ] + } + } + ] +} \ No newline at end of file diff --git a/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc b/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc new file mode 100644 index 000000000..90d2a4dd0 Binary files /dev/null and b/mesh_tools/mesh_conversion_tools/test/land_mask_final.nc differ diff --git a/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc b/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc new file mode 100644 index 000000000..eecf5aedc Binary files /dev/null and b/mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc differ diff --git a/grid_gen/periodic_hex/Makefile b/mesh_tools/periodic_hex/Makefile similarity index 100% rename from grid_gen/periodic_hex/Makefile rename to mesh_tools/periodic_hex/Makefile diff --git a/grid_gen/periodic_hex/cells.ncl b/mesh_tools/periodic_hex/cells.ncl similarity index 100% rename from grid_gen/periodic_hex/cells.ncl rename to mesh_tools/periodic_hex/cells.ncl diff --git a/grid_gen/periodic_hex/mark_periodic_boundaries_for_culling.py b/mesh_tools/periodic_hex/mark_periodic_boundaries_for_culling.py similarity index 100% rename from grid_gen/periodic_hex/mark_periodic_boundaries_for_culling.py rename to mesh_tools/periodic_hex/mark_periodic_boundaries_for_culling.py diff --git a/grid_gen/periodic_hex/module_cell_indexing.F b/mesh_tools/periodic_hex/module_cell_indexing.F similarity index 100% rename from grid_gen/periodic_hex/module_cell_indexing.F rename to mesh_tools/periodic_hex/module_cell_indexing.F diff --git a/grid_gen/periodic_hex/module_write_netcdf.F b/mesh_tools/periodic_hex/module_write_netcdf.F similarity index 74% rename from grid_gen/periodic_hex/module_write_netcdf.F rename to mesh_tools/periodic_hex/module_write_netcdf.F index ba430aedd..debbb8ac5 100644 --- a/grid_gen/periodic_hex/module_write_netcdf.F +++ b/mesh_tools/periodic_hex/module_write_netcdf.F @@ -9,8 +9,6 @@ module write_netcdf integer :: wrDimIDmaxEdges2 integer :: wrDimIDTWO integer :: wrDimIDvertexDegree - integer :: wrDimIDnVertLevels - integer :: wrDimIDnTracers integer :: wrVarIDlatCell integer :: wrVarIDlonCell integer :: wrVarIDxCell @@ -46,25 +44,12 @@ module write_netcdf integer :: wrVarIDedgesOnVertex integer :: wrVarIDcellsOnVertex integer :: wrVarIDkiteAreasOnVertex - integer :: wrVarIDfEdge - integer :: wrVarIDfVertex - integer :: wrVarIDh_s - integer :: wrVarIDu - integer :: wrVarIDuBC - integer :: wrVarIDv - integer :: wrVarIDh - integer :: wrVarIDvh - integer :: wrVarIDcirculation - integer :: wrVarIDvorticity - integer :: wrVarIDke - integer :: wrVarIDtracers + integer :: 
wrVarIDmeshDensity integer :: wrLocalnCells integer :: wrLocalnEdges integer :: wrLocalnVertices integer :: wrLocalmaxEdges - integer :: wrLocalnVertLevels - integer :: wrLocalnTracers contains @@ -73,8 +58,6 @@ subroutine write_netcdf_init( & nEdges, & nVertices, & maxEdges, & - nVertLevels, & - nTracers, & vertexDegree, & dc, & nx, & @@ -89,8 +72,6 @@ subroutine write_netcdf_init( & integer, intent(in) :: nEdges integer, intent(in) :: nVertices integer, intent(in) :: maxEdges - integer, intent(in) :: nVertLevels - integer, intent(in) :: nTracers integer, intent(in) :: vertexDegree real (kind=8), intent(in) :: dc integer, intent(in) :: nx @@ -108,8 +89,6 @@ subroutine write_netcdf_init( & wrLocalnEdges = nEdges wrLocalnVertices = nVertices wrLocalmaxEdges = maxEdges - wrLocalnVertLevels = nVertLevels - wrLocalnTracers = nTracers on_a_sphere = 'NO' is_periodic = 'YES' @@ -129,8 +108,6 @@ subroutine write_netcdf_init( & nferr = nf_def_dim(wr_ncid, 'maxEdges2', 2*maxEdges, wrDimIDmaxEdges2) nferr = nf_def_dim(wr_ncid, 'TWO', 2, wrDimIDTWO) nferr = nf_def_dim(wr_ncid, 'vertexDegree', vertexDegree, wrDimIDvertexDegree) - nferr = nf_def_dim(wr_ncid, 'nVertLevels', nVertLevels, wrDimIDnVertLevels) - nferr = nf_def_dim(wr_ncid, 'nTracers', nTracers, wrDimIDnTracers) nferr = nf_def_dim(wr_ncid, 'Time', NF_UNLIMITED, wrDimIDTime) @@ -227,48 +204,8 @@ subroutine write_netcdf_init( & dimlist( 1) = wrDimIDvertexDegree dimlist( 2) = wrDimIDnVertices nferr = nf_def_var(wr_ncid, 'kiteAreasOnVertex', NF_DOUBLE, 2, dimlist, wrVarIDkiteAreasOnVertex) - dimlist( 1) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'fEdge', NF_DOUBLE, 1, dimlist, wrVarIDfEdge) - dimlist( 1) = wrDimIDnVertices - nferr = nf_def_var(wr_ncid, 'fVertex', NF_DOUBLE, 1, dimlist, wrVarIDfVertex) dimlist( 1) = wrDimIDnCells - nferr = nf_def_var(wr_ncid, 'h_s', NF_DOUBLE, 1, dimlist, wrVarIDh_s) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'u', NF_DOUBLE, 3, dimlist, wrVarIDu) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - nferr = nf_def_var(wr_ncid, 'uBC', NF_INT, 2, dimlist, wrVarIDuBC) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'v', NF_DOUBLE, 3, dimlist, wrVarIDv) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'h', NF_DOUBLE, 3, dimlist, wrVarIDh) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnEdges - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'vh', NF_DOUBLE, 3, dimlist, wrVarIDvh) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'circulation', NF_DOUBLE, 3, dimlist, wrVarIDcirculation) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnVertices - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'vorticity', NF_DOUBLE, 3, dimlist, wrVarIDvorticity) - dimlist( 1) = wrDimIDnVertLevels - dimlist( 2) = wrDimIDnCells - dimlist( 3) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'ke', NF_DOUBLE, 3, dimlist, wrVarIDke) - dimlist( 1) = wrDimIDnTracers - dimlist( 2) = wrDimIDnVertLevels - dimlist( 3) = wrDimIDnCells - dimlist( 4) = wrDimIDTime - nferr = nf_def_var(wr_ncid, 'tracers', NF_DOUBLE, 4, dimlist, wrVarIDtracers) + nferr = nf_def_var(wr_ncid, 'meshDensity', NF_DOUBLE, 1, dimlist, wrVarIDmeshDensity) nferr = nf_enddef(wr_ncid) @@ -276,7 +213,6 @@ end subroutine 
write_netcdf_init subroutine write_netcdf_fields( & - time, & latCell, & lonCell, & xCell, & @@ -312,25 +248,13 @@ subroutine write_netcdf_fields( & edgesOnVertex, & cellsOnVertex, & kiteAreasOnVertex, & - fEdge, & - fVertex, & - h_s, & - uBC, & - u, & - v, & - h, & - vh, & - circulation, & - vorticity, & - ke, & - tracers & + meshDensity & ) implicit none include 'netcdf.inc' - integer, intent(in) :: time real (kind=8), dimension(:), intent(in) :: latCell real (kind=8), dimension(:), intent(in) :: lonCell real (kind=8), dimension(:), intent(in) :: xCell @@ -366,39 +290,17 @@ subroutine write_netcdf_fields( & integer, dimension(:,:), intent(in) :: edgesOnVertex integer, dimension(:,:), intent(in) :: cellsOnVertex real (kind=8), dimension(:,:), intent(in) :: kiteAreasOnVertex - real (kind=8), dimension(:), intent(in) :: fEdge - real (kind=8), dimension(:), intent(in) :: fVertex - real (kind=8), dimension(:), intent(in) :: h_s - integer, dimension(:,:), intent(in) :: uBC - real (kind=8), dimension(:,:,:), intent(in) :: u - real (kind=8), dimension(:,:,:), intent(in) :: v - real (kind=8), dimension(:,:,:), intent(in) :: h - real (kind=8), dimension(:,:,:), intent(in) :: vh - real (kind=8), dimension(:,:,:), intent(in) :: circulation - real (kind=8), dimension(:,:,:), intent(in) :: vorticity - real (kind=8), dimension(:,:,:), intent(in) :: ke - real (kind=8), dimension(:,:,:,:), intent(in) :: tracers + real (kind=8), dimension(:), intent(in) :: meshDensity integer :: nferr integer, dimension(1) :: start1, count1 integer, dimension(2) :: start2, count2 - integer, dimension(3) :: start3, count3 - integer, dimension(4) :: start4, count4 start1(1) = 1 start2(1) = 1 start2(2) = 1 - start3(1) = 1 - start3(2) = 1 - start3(3) = 1 - - start4(1) = 1 - start4(2) = 1 - start4(3) = 1 - start4(4) = 1 - start1(1) = 1 count1( 1) = wrLocalnCells nferr = nf_put_vara_double(wr_ncid, wrVarIDlatCell, start1, count1, latCell) @@ -548,73 +450,10 @@ subroutine write_netcdf_fields( & count2( 1) = 3 count2( 2) = wrLocalnVertices nferr = nf_put_vara_double(wr_ncid, wrVarIDkiteAreasOnVertex, start2, count2, kiteAreasOnVertex) - - start1(1) = 1 - count1( 1) = wrLocalnEdges - nferr = nf_put_vara_double(wr_ncid, wrVarIDfEdge, start1, count1, fEdge) - - start1(1) = 1 - count1( 1) = wrLocalnVertices - nferr = nf_put_vara_double(wr_ncid, wrVarIDfVertex, start1, count1, fVertex) - + start1(1) = 1 count1( 1) = wrLocalnCells - nferr = nf_put_vara_double(wr_ncid, wrVarIDh_s, start1, count1, h_s) - - start2(2) = 1 - count2( 1) = wrLocalnVertLevels - count2( 2) = wrLocalnEdges - nferr = nf_put_vara_int(wr_ncid, wrVarIDuBC, start2, count2, u) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDu, start3, count3, u) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDv, start3, count3, v) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDh, start3, count3, h) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnEdges - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDvh, start3, count3, vh) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnVertices - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDcirculation, start3, count3, circulation) - - start3(3) = time - count3( 1) = 
wrLocalnVertLevels - count3( 2) = wrLocalnVertices - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDvorticity, start3, count3, vorticity) - - start3(3) = time - count3( 1) = wrLocalnVertLevels - count3( 2) = wrLocalnCells - count3( 3) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDke, start3, count3, ke) - - start4(4) = time - count4( 1) = wrLocalnTracers - count4( 2) = wrLocalnVertLevels - count4( 3) = wrLocalnCells - count4( 4) = 1 - nferr = nf_put_vara_double(wr_ncid, wrVarIDtracers, start4, count4, tracers) - + nferr = nf_put_vara_double(wr_ncid, wrVarIDmeshDensity, start1, count1, meshDensity) end subroutine write_netcdf_fields diff --git a/grid_gen/periodic_hex/namelist.input b/mesh_tools/periodic_hex/namelist.input similarity index 100% rename from grid_gen/periodic_hex/namelist.input rename to mesh_tools/periodic_hex/namelist.input diff --git a/grid_gen/periodic_hex/periodic_grid.F b/mesh_tools/periodic_hex/periodic_grid.F similarity index 82% rename from grid_gen/periodic_hex/periodic_grid.F rename to mesh_tools/periodic_hex/periodic_grid.F index ef208a598..f6e78b125 100644 --- a/grid_gen/periodic_hex/periodic_grid.F +++ b/mesh_tools/periodic_hex/periodic_grid.F @@ -16,22 +16,19 @@ program hexagonal_periodic_grid integer, allocatable, dimension(:) :: nEdgesOnCell, nEdgesOnEdge integer, allocatable, dimension(:,:) :: cellsOnCell, edgesOnCell, verticesOnCell integer, allocatable, dimension(:,:) :: cellsOnEdge, edgesOnEdge, verticesOnEdge - integer, allocatable, dimension(:,:) :: edgesOnVertex, cellsOnVertex, uBC + integer, allocatable, dimension(:,:) :: edgesOnVertex, cellsOnVertex real (kind=8), allocatable, dimension(:) :: areaTriangle, areaCell, angleEdge real (kind=8), allocatable, dimension(:) :: dcEdge, dvEdge real (kind=8), allocatable, dimension(:) :: latCell, lonCell, xCell, yCell, zCell real (kind=8), allocatable, dimension(:) :: latEdge, lonEdge, xEdge, yEdge, zEdge real (kind=8), allocatable, dimension(:) :: latVertex, lonVertex, xVertex, yVertex, zVertex + real (kind=8), allocatable, dimension(:) :: meshDensity real (kind=8), allocatable, dimension(:,:) :: weightsOnEdge, kiteAreasOnVertex - real (kind=8), allocatable, dimension(:) :: fEdge, fVertex, h_s - real (kind=8), allocatable, dimension(:,:,:) :: u, v, h, vh, circulation, vorticity, ke - real (kind=8), allocatable, dimension(:,:,:,:) :: tracers integer :: i, j, np, iCell integer :: nCells, nEdges, nVertices integer :: iRow, iCol, ii, jj integer :: nprocx, nprocy - real (kind=8) :: r character (len=32) :: decomp_fname call cell_indexing_read_nl() @@ -81,20 +78,8 @@ program hexagonal_periodic_grid allocate(xVertex(nVertices)) allocate(yVertex(nVertices)) allocate(zVertex(nVertices)) + allocate(meshDensity(nCells)) - allocate(fEdge(nEdges)) - allocate(fVertex(nVertices)) - allocate(h_s(nCells)) - allocate(uBC(nVertLevels, nEdges)) - - allocate(u(nVertLevels,nEdges,1)) - allocate(v(nVertLevels,nEdges,1)) - allocate(vh(nVertLevels,nEdges,1)) - allocate(h(nVertLevels,nCells,1)) - allocate(circulation(nVertLevels,nVertices,1)) - allocate(vorticity(nVertLevels,nVertices,1)) - allocate(ke(nVertLevels,nCells,1)) - allocate(tracers(nTracers,nVertLevels,nCells,1)) do iRow = 1, ny do iCol = 1, nx @@ -271,45 +256,14 @@ program hexagonal_periodic_grid end do end do - - ! - ! fill in initial conditions below - ! NOTE: these initial conditions will likely be removed - ! from the grid.nc files at some point (soon). - ! Initialize fields in grid - ! 
- - fEdge(:) = 1.0e-4 - fVertex(:) = 1.0e-4 - - h_s(:) = 0.0 - u(:,:,:) = 0.0 - v(:,:,:) = 0.0 - vh(:,:,:) = 0.0 - circulation(:,:,:) = 0.0 - vorticity(:,:,:) = 0.0 - ke(:,:,:) = 0.0 - tracers(:,:,:,:) = 0.0 - h(:,:,:) = 1.0 - - do i=1,nCells - r = sqrt((xCell(i) - (nx/2)*(10.0*dc))**2.0 + (yCell(i) - (ny/2)*(10.0*dc))**2.0) - if (r < 10.0*10.0*dc) then - tracers(1,1,i,1) = (20.0 / 2.0) * (1.0 + cos(pi*r/(10.0*10.0*dc))) + 0.0 - h(1,i,1) = 1.0 + 0.1*cos(pi*r/(20.0*10.0*dc)) - else - tracers(1,1,i,1) = 0.0 - h(1,i,1) = 1.0 - end if - end do + meshDensity(:) = 1.0 ! ! Write grid to grid.nc file ! - call write_netcdf_init( nCells, nEdges, nVertices, maxEdges, nVertLevels, nTracers, vertexDegree, dc, nx, ny ) + call write_netcdf_init( nCells, nEdges, nVertices, maxEdges, vertexDegree, dc, nx, ny ) - call write_netcdf_fields( 1, & - latCell, lonCell, xCell, yCell, zCell, indexToCellID, & + call write_netcdf_fields( latCell, lonCell, xCell, yCell, zCell, indexToCellID, & latEdge, lonEdge, xEdge, yEdge, zEdge, indexToEdgeID, & latVertex, lonVertex, xVertex, yVertex, zVertex, indexToVertexID, & cellsOnEdge, & @@ -329,18 +283,7 @@ program hexagonal_periodic_grid edgesOnVertex, & cellsOnVertex, & kiteAreasOnVertex, & - fEdge, & - fVertex, & - h_s, & - uBC, & - u, & - v, & - h, & - vh, & - circulation, & - vorticity, & - ke, & - tracers & + meshDensity & ) call write_netcdf_finalize() @@ -380,21 +323,6 @@ program hexagonal_periodic_grid end program hexagonal_periodic_grid -subroutine enforce_uBC(u, uBC, xCell, yCell, zCell, nCells, nEdges, nVertLevels, dc) -! this suboutine provides a hook into uBC. the uBC field is read into the ocean -! model and used to enforce boundary conditions on the velocity field. -! uBC is written to the grid.nc file, even if the forward model does not use it. 
- -real (kind=8), intent(in) :: dc -real (kind=8), intent(inout), dimension(nVertLevels, nEdges, 1) :: u -real (kind=8), intent(in), dimension(nCells) :: xCell, yCell, zCell -integer, intent(inout), dimension(nVertLevels, nEdges) :: uBC - -uBC = -10 - -end subroutine enforce_uBC - - subroutine decompose_nproc(nproc, nprocx, nprocy) implicit none diff --git a/grid_gen/periodic_quad/Makefile b/mesh_tools/periodic_quad/Makefile similarity index 100% rename from grid_gen/periodic_quad/Makefile rename to mesh_tools/periodic_quad/Makefile diff --git a/grid_gen/periodic_quad/cells.ncl b/mesh_tools/periodic_quad/cells.ncl similarity index 100% rename from grid_gen/periodic_quad/cells.ncl rename to mesh_tools/periodic_quad/cells.ncl diff --git a/grid_gen/periodic_quad/module_cell_indexing.F b/mesh_tools/periodic_quad/module_cell_indexing.F similarity index 100% rename from grid_gen/periodic_quad/module_cell_indexing.F rename to mesh_tools/periodic_quad/module_cell_indexing.F diff --git a/grid_gen/periodic_quad/module_write_netcdf.F b/mesh_tools/periodic_quad/module_write_netcdf.F similarity index 100% rename from grid_gen/periodic_quad/module_write_netcdf.F rename to mesh_tools/periodic_quad/module_write_netcdf.F diff --git a/grid_gen/periodic_quad/namelist.input b/mesh_tools/periodic_quad/namelist.input similarity index 100% rename from grid_gen/periodic_quad/namelist.input rename to mesh_tools/periodic_quad/namelist.input diff --git a/grid_gen/periodic_quad/periodic_grid.F b/mesh_tools/periodic_quad/periodic_grid.F similarity index 100% rename from grid_gen/periodic_quad/periodic_grid.F rename to mesh_tools/periodic_quad/periodic_grid.F diff --git a/grid_gen/planar_grid_transformations/multires_scaled_hex.py b/mesh_tools/planar_grid_transformations/multires_scaled_hex.py similarity index 96% rename from grid_gen/planar_grid_transformations/multires_scaled_hex.py rename to mesh_tools/planar_grid_transformations/multires_scaled_hex.py index f52f3c9a8..e051521af 100755 --- a/grid_gen/planar_grid_transformations/multires_scaled_hex.py +++ b/mesh_tools/planar_grid_transformations/multires_scaled_hex.py @@ -15,6 +15,9 @@ Phillip J. Wolfram 12/22/2015 """ + +from __future__ import absolute_import, division, print_function, unicode_literals + import matplotlib.pyplot as plt import numpy as np import numexpr as ne @@ -64,7 +67,7 @@ def multires_scaled_hex(infname, outfname, xc=25000/2.0, yc=50000/2.0, radius=50 vertices = np.unique(vertices).tolist() for i in 1+np.arange(ntimes): - print 'Processing layer %d of %d...'%(i,ntimes) + print('Processing layer %d of %d...'%(i,ntimes)) for acell in cells[:]: for cellneighs in ds.variables['cellsOnCell'][acell]-1: cells.append(cellneighs) @@ -99,7 +102,7 @@ def multires_scaled_hex(infname, outfname, xc=25000/2.0, yc=50000/2.0, radius=50 plt.axis('equal') plt.show() - print 'done!' 
+ print('done!') # compute vertex locations from circumcenters to ensure grid is Voronoi interior = np.prod(ds.variables['cellsOnVertex'][:],axis=1) > 0 @@ -109,7 +112,7 @@ def multires_scaled_hex(infname, outfname, xc=25000/2.0, yc=50000/2.0, radius=50 verticesOnCell[ic,nedge:] = np.nan for nl in np.arange(nllyod): - print 'On iteration %d of %d'%(nl+1, nllyod) + print('On iteration %d of %d'%(nl+1, nllyod)) if nl > 0: # update xc generators to be centroid of cells xc = np.nanmean(x[verticesOnCell], axis=1) @@ -140,7 +143,7 @@ def multires_scaled_hex(infname, outfname, xc=25000/2.0, yc=50000/2.0, radius=50 ds.close() - print 'finished grid' + print('finished grid') if __name__ == "__main__": diff --git a/grid_gen/planar_grid_transformations/rotate_planar_grid.readme.txt b/mesh_tools/planar_grid_transformations/rotate_planar_grid.readme.txt similarity index 100% rename from grid_gen/planar_grid_transformations/rotate_planar_grid.readme.txt rename to mesh_tools/planar_grid_transformations/rotate_planar_grid.readme.txt diff --git a/grid_gen/planar_grid_transformations/scale_planar_grid.py b/mesh_tools/planar_grid_transformations/scale_planar_grid.py similarity index 87% rename from grid_gen/planar_grid_transformations/scale_planar_grid.py rename to mesh_tools/planar_grid_transformations/scale_planar_grid.py index 8f8c446db..5327c583e 100755 --- a/grid_gen/planar_grid_transformations/scale_planar_grid.py +++ b/mesh_tools/planar_grid_transformations/scale_planar_grid.py @@ -1,5 +1,10 @@ #!/usr/bin/env python -import numpy, math +""" +This script scales the grid of an existing MPAS mesh by a scalar amount. +""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import sys from netCDF4 import Dataset as NetCDFFile from optparse import OptionParser from datetime import datetime @@ -12,7 +17,7 @@ if not options.filename: parser.error("A grid file is required.") -print "Applying scale factor of: ", options.scale +print("Applying scale factor of: ", options.scale) scale = float(options.scale) @@ -45,5 +50,6 @@ newhist = thiscommand setattr(grid, 'history', newhist ) - grid.close() + +print("Scale operation is complete.") diff --git a/grid_gen/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py b/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py similarity index 80% rename from grid_gen/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py rename to mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py index 3cb035e7b..32688c2f9 100755 --- a/grid_gen/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py +++ b/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py @@ -3,10 +3,11 @@ Take MPAS planar grid and populate the lat/lon fields based on a specified projection. ''' +from __future__ import absolute_import, division, print_function, unicode_literals + import sys import netCDF4 import pyproj -#import numpy as np from optparse import OptionParser from datetime import datetime @@ -16,7 +17,7 @@ projections = dict() # add more as needed: -# CISM's projection is as follows, with the vertical datum as EIGEN-GL04C geoid. +# CISM's projection is as follows, with the vertical datum as EIGEN-GL04C geoid. # datum is actually EIGEN-GL04C but that is not an option in Proj. Therefore using EGM08 which should be within ~1m everywhere (and 10-20 cm in most places) # NOTE!!!!!! 
egm08_25.gtx can be downloaded from: http://download.osgeo.org/proj/vdatum/egm08_25/egm08_25.gtx and the path in the projection specification line should point to it! #projections['gis-bamber'] = pyproj.Proj('+proj=stere +lat_ts=71.0 +lat_0=90 +lon_0=321.0 +k_0=1.0 +x_0=800000.0 +y_0=3400000.0 +geoidgrids=./egm08_25.gtx') @@ -35,28 +36,28 @@ -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("== Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser() parser.description = "This script populates the MPAS lat and lon fields based on the projection specified by the -p option." parser.add_option("-f", "--file", dest="fileInName", help="MPAS land ice file name.", default="landice_grid.nc", metavar="FILENAME") -parser.add_option("-p", "--proj", dest="projection", help="projection used for the data. Valid options are: \n" + str(projections.keys()), metavar="PROJ") +parser.add_option("-p", "--proj", dest="projection", help="projection used for the data. Valid options are: \n" + str(list(projections.keys())), metavar="PROJ") for option in parser.option_list: if option.default != ("NO", "DEFAULT"): option.help += (" " if option.help else "") + "[default: %default]" options, args = parser.parse_args() if not options.projection: - sys.exit('Error: data projection required with -p or --proj command line argument. Valid options are: ' + str(projections.keys())) + sys.exit('Error: data projection required with -p or --proj command line argument. Valid options are: ' + str(list(projections.keys()))) if not options.fileInName: - print "No filename specified, so using 'landice_grid.nc'." + print("No filename specified, so using 'landice_grid.nc'.") options.fileInName = 'landice_grid.nc' -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output # ================================================= -print "Using {} projection, defined as: {}".format(options.projection, projections[options.projection].srs) +print("Using {} projection, defined as: {}".format(options.projection, projections[options.projection].srs)) # get needed fields f = netCDF4.Dataset(options.fileInName, 'r+') @@ -74,8 +75,8 @@ latEdge = f.variables['latEdge'] lonEdge = f.variables['lonEdge'] -print "Input file xCell min/max values:", xCell[:].min(), xCell[:].max() -print "Input file yCell min/max values:", yCell[:].min(), yCell[:].max() +print("Input file xCell min/max values:", xCell[:].min(), xCell[:].max()) +print("Input file yCell min/max values:", yCell[:].min(), yCell[:].max()) # populate x,y fields # MPAS uses lat/lon in radians, so have pyproj return fields in radians. 
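# A self-contained sketch of the same pattern, shown here because the point
# about radians is easy to miss: the projection string is the commented-out
# 'gis-bamber' example above with the geoidgrids option dropped (so no .gtx
# file is needed), the sample point is made up, and pyproj.transform is the
# legacy pyproj call this script itself uses (newer pyproj releases prefer
# Transformer objects).
import pyproj

polar_stereo = pyproj.Proj('+proj=stere +lat_ts=71.0 +lat_0=90 +lon_0=321.0 '
                           '+k_0=1.0 +x_0=800000.0 +y_0=3400000.0')
latlon = pyproj.Proj(proj='latlong', datum='WGS84')

# one planar (x, y) point in meters -> (lon, lat) in radians, ready to be
# stored directly in the MPAS lonCell/latCell fields
lon, lat = pyproj.transform(polar_stereo, latlon, 500000.0, 2000000.0,
                            radians=True)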
@@ -83,8 +84,8 @@ lonVertex[:], latVertex[:] = pyproj.transform(projections[options.projection], projections['latlon'], xVertex[:], yVertex[:], radians=True) lonEdge[:], latEdge[:] = pyproj.transform(projections[options.projection], projections['latlon'], xEdge[:], yEdge[:], radians=True) -print "Calculated latCell min/max values (radians):", latCell[:].min(), latCell[:].max() -print "Calculated lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max() +print("Calculated latCell min/max values (radians):", latCell[:].min(), latCell[:].max()) +print("Calculated lonCell min/max values (radians):", lonCell[:].min(), lonCell[:].max()) # Update history attribute of netCDF file thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) @@ -97,4 +98,4 @@ f.close() -print "Lat/lon calculations completed." +print("Lat/lon calculations completed. File has been written.") diff --git a/grid_gen/planar_grid_transformations/translate_planar_grid.py b/mesh_tools/planar_grid_transformations/translate_planar_grid.py similarity index 83% rename from grid_gen/planar_grid_transformations/translate_planar_grid.py rename to mesh_tools/planar_grid_transformations/translate_planar_grid.py index 0541039c6..b0e9f35a5 100755 --- a/grid_gen/planar_grid_transformations/translate_planar_grid.py +++ b/mesh_tools/planar_grid_transformations/translate_planar_grid.py @@ -2,15 +2,15 @@ ''' Translate planar MPAS grid by one of three methods ''' +from __future__ import absolute_import, division, print_function, unicode_literals import sys import netCDF4 -#import numpy as np from optparse import OptionParser from datetime import datetime -print "== Gathering information. (Invoke with --help for more details. All arguments are optional)" +print("== Gathering information. (Invoke with --help for more details. All arguments are optional)") parser = OptionParser() parser.description = ("This script translates the coordinate system of the planar MPAS mesh specified with the -f flag. " "There are 3 possible methods to choose from:" @@ -27,7 +27,7 @@ option.help += (" " if option.help else "") + "[default: %default]" options, args = parser.parse_args() -print "Attempting to translate coordinates in file: %s"%options.fileInName +print("Attempting to translate coordinates in file: {}".format(options.fileInName)) if options.dataFileName and (options.xshift or options.yshift): @@ -44,17 +44,17 @@ if options.dataFileName: method = 'file' - print " Translating coordinates in %s so the domain center matches the domain center in %s."%(options.fileInName, options.dataFileName) + print(" Translating coordinates in {} so the domain center matches the domain center in {}.".format(options.fileInName, options.dataFileName)) if options.xshift or options.yshift: method = 'xy' - print " Translating coordinates in %s by user-specified values. X-shift=%f; Y-shift=%f"%(options.fileInName, options.xshift, options.yshift) + print(" Translating coordinates in {} by user-specified values. X-shift={}; Y-shift={}".format(options.fileInName, options.xshift, options.yshift)) -if options.center: +if options.center: method = 'center' - print " Translating coordinates in %s so the origin is the center of the domain." 
+ print(" Translating coordinates in %s so the origin is the center of the domain.") -print '' # make a space in stdout before further output +print('') # make a space in stdout before further output # ================================================= @@ -114,4 +114,4 @@ f.close() -print "Translation completed." +print("Translation completed.") diff --git a/mesh_tools/planar_hex/mpas_tools b/mesh_tools/planar_hex/mpas_tools new file mode 120000 index 000000000..627733f3b --- /dev/null +++ b/mesh_tools/planar_hex/mpas_tools @@ -0,0 +1 @@ +../../conda_package/mpas_tools/ \ No newline at end of file diff --git a/mesh_tools/planar_hex/planar_hex b/mesh_tools/planar_hex/planar_hex new file mode 100755 index 000000000..0af1f7a8c --- /dev/null +++ b/mesh_tools/planar_hex/planar_hex @@ -0,0 +1,11 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +import re +import sys + +from mpas_tools.planar_hex import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/grid_gen/points-mpas/.gitignore b/mesh_tools/points-mpas/.gitignore similarity index 100% rename from grid_gen/points-mpas/.gitignore rename to mesh_tools/points-mpas/.gitignore diff --git a/grid_gen/points-mpas/Makefile b/mesh_tools/points-mpas/Makefile similarity index 100% rename from grid_gen/points-mpas/Makefile rename to mesh_tools/points-mpas/Makefile diff --git a/grid_gen/points-mpas/Params b/mesh_tools/points-mpas/Params similarity index 100% rename from grid_gen/points-mpas/Params rename to mesh_tools/points-mpas/Params diff --git a/grid_gen/points-mpas/README b/mesh_tools/points-mpas/README similarity index 100% rename from grid_gen/points-mpas/README rename to mesh_tools/points-mpas/README diff --git a/grid_gen/points-mpas/SaveDensity b/mesh_tools/points-mpas/SaveDensity similarity index 100% rename from grid_gen/points-mpas/SaveDensity rename to mesh_tools/points-mpas/SaveDensity diff --git a/grid_gen/points-mpas/SaveTriangles b/mesh_tools/points-mpas/SaveTriangles similarity index 100% rename from grid_gen/points-mpas/SaveTriangles rename to mesh_tools/points-mpas/SaveTriangles diff --git a/grid_gen/points-mpas/SaveVertices b/mesh_tools/points-mpas/SaveVertices similarity index 100% rename from grid_gen/points-mpas/SaveVertices rename to mesh_tools/points-mpas/SaveVertices diff --git a/grid_gen/points-mpas/points-mpas.cpp b/mesh_tools/points-mpas/points-mpas.cpp similarity index 100% rename from grid_gen/points-mpas/points-mpas.cpp rename to mesh_tools/points-mpas/points-mpas.cpp diff --git a/grid_gen/points-mpas/triangulation.h b/mesh_tools/points-mpas/triangulation.h similarity index 100% rename from grid_gen/points-mpas/triangulation.h rename to mesh_tools/points-mpas/triangulation.h diff --git a/python_scripts/processor_decompositions/README b/mesh_tools/processor_decompositions/README similarity index 100% rename from python_scripts/processor_decompositions/README rename to mesh_tools/processor_decompositions/README diff --git a/python_scripts/processor_decompositions/make_partition_files.py b/mesh_tools/processor_decompositions/make_partition_files.py similarity index 100% rename from python_scripts/processor_decompositions/make_partition_files.py rename to mesh_tools/processor_decompositions/make_partition_files.py diff --git a/grid_gen/seaice_grid_tools/README b/mesh_tools/seaice_grid_tools/README similarity index 100% rename from grid_gen/seaice_grid_tools/README rename to mesh_tools/seaice_grid_tools/README diff --git 
a/grid_gen/seaice_grid_tools/gen_seaice_mesh_partition.py b/mesh_tools/seaice_grid_tools/gen_seaice_mesh_partition.py similarity index 100% rename from grid_gen/seaice_grid_tools/gen_seaice_mesh_partition.py rename to mesh_tools/seaice_grid_tools/gen_seaice_mesh_partition.py diff --git a/grid_gen/seaice_grid_tools/gen_seaice_regions_latitude_bands.py b/mesh_tools/seaice_grid_tools/gen_seaice_regions_latitude_bands.py similarity index 100% rename from grid_gen/seaice_grid_tools/gen_seaice_regions_latitude_bands.py rename to mesh_tools/seaice_grid_tools/gen_seaice_regions_latitude_bands.py diff --git a/grid_gen/triangle_jigsaw_to_netcdf/.gitignore b/mesh_tools/triangle_jigsaw_to_netcdf/.gitignore similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/.gitignore rename to mesh_tools/triangle_jigsaw_to_netcdf/.gitignore diff --git a/grid_gen/triangle_jigsaw_to_netcdf/README.md b/mesh_tools/triangle_jigsaw_to_netcdf/README.md similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/README.md rename to mesh_tools/triangle_jigsaw_to_netcdf/README.md diff --git a/grid_gen/triangle_jigsaw_to_netcdf/build_mesh.sh b/mesh_tools/triangle_jigsaw_to_netcdf/build_mesh.sh similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/build_mesh.sh rename to mesh_tools/triangle_jigsaw_to_netcdf/build_mesh.sh diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/antarctic_coupled.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/antarctic_coupled_vis.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/coast_regional.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/coast_regional.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/coast_regional.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/coast_regional.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/jigsaw_path_locations.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/mpas_ec_60to30.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/mpas_uniform.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/mpas_uniform.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/mpas_uniform.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/mpas_uniform.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/refined_disk.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/refined_disk.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/refined_disk.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/refined_disk.m diff 
--git a/grid_gen/triangle_jigsaw_to_netcdf/examples/soma_32to4km.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/soma_32to4km.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/soma_32to4km.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/soma_32to4km.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/examples/soma_32to8km.m b/mesh_tools/triangle_jigsaw_to_netcdf/examples/soma_32to8km.m similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/examples/soma_32to8km.m rename to mesh_tools/triangle_jigsaw_to_netcdf/examples/soma_32to8km.m diff --git a/grid_gen/triangle_jigsaw_to_netcdf/inject_bathymetry.py b/mesh_tools/triangle_jigsaw_to_netcdf/inject_bathymetry.py similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/inject_bathymetry.py rename to mesh_tools/triangle_jigsaw_to_netcdf/inject_bathymetry.py diff --git a/grid_gen/triangle_jigsaw_to_netcdf/mpas_to_triangle.py b/mesh_tools/triangle_jigsaw_to_netcdf/mpas_to_triangle.py similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/mpas_to_triangle.py rename to mesh_tools/triangle_jigsaw_to_netcdf/mpas_to_triangle.py diff --git a/grid_gen/triangle_jigsaw_to_netcdf/open_msh.py b/mesh_tools/triangle_jigsaw_to_netcdf/open_msh.py similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/open_msh.py rename to mesh_tools/triangle_jigsaw_to_netcdf/open_msh.py diff --git a/grid_gen/triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py b/mesh_tools/triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py similarity index 100% rename from grid_gen/triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py rename to mesh_tools/triangle_jigsaw_to_netcdf/triangle_jigsaw_to_netcdf.py diff --git a/ocean/add_depth_coord/add_depth.py b/ocean/add_depth_coord/add_depth.py new file mode 100755 index 000000000..1382ecce1 --- /dev/null +++ b/ocean/add_depth_coord/add_depth.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python + +""" +Add a 1D coordinate "depth" to an MPAS-Ocean output file that defines the +positive-up vertical location of each layer. +""" +# Authors +# ------- +# Xylar Asay-Davis + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import xarray +import numpy +import argparse +import sys +from datetime import datetime + + +def write_netcdf(ds, fileName): + ''' + Write an xarray data set to a NetCDF file making use of the _FillValue + attributes of each variable. This function should be used for data sets + opened with mask_and_scale=False. 
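    A minimal usage sketch (the file names are placeholders):

        ds = xarray.open_dataset('timeSeries.nc', mask_and_scale=False)
        write_netcdf(ds, 'timeSeries_with_fill.nc')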
+ + Parameters + ---------- + ds : xarray.Dataset object + The xarray data set to be written to a file + + fileName : str + The fileName to write the data set to + ''' + # Authors + # ------- + # Xylar Asay-Davis + + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + if '_FillValue' in ds[variableName].attrs: + encodingDict[variableName] = \ + {'_FillValue': ds[variableName].attrs['_FillValue']} + del ds[variableName].attrs['_FillValue'] + else: + encodingDict[variableName] = {'_FillValue': None} + + ds.to_netcdf(fileName, encoding=encodingDict) + + +def compute_depth(refBottomDepth): + """ + Computes depth and depth bounds given refBottomDepth + + Parameters + ---------- + refBottomDepth : ``xarray.DataArray`` + the depth of the bottom of each vertical layer in the initial state + (perfect z-level coordinate) + + Returns + ------- + depth : ``xarray.DataArray`` + the vertical coordinate defining the middle of each layer + depth_bnds : ``xarray.DataArray`` + the vertical coordinate defining the top and bottom of each layer + """ + # Authors + # ------- + # Xylar Asay-Davis + + refBottomDepth = refBottomDepth.values + + depth_bnds = numpy.zeros((len(refBottomDepth), 2)) + + depth_bnds[0, 0] = 0. + depth_bnds[1:, 0] = refBottomDepth[0:-1] + depth_bnds[:, 1] = refBottomDepth + depth = 0.5*(depth_bnds[:, 0] + depth_bnds[:, 1]) + + return depth, depth_bnds + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-c", "--coordFileName", dest="coordFileName", + type=str, required=False, + help="A MPAS-Ocean file with refBottomDepth") + parser.add_argument("-i", "--inFileName", dest="inFileName", type=str, + required=True, + help="An input MPAS-Ocean file that depth should be" + "added to, used for coords if another file is" + "not provided via -c.") + parser.add_argument("-o", "--outFileName", dest="outFileName", type=str, + required=True, + help="An output MPAS-Ocean file with depth added") + args = parser.parse_args() + + if args.coordFileName: + coordFileName = args.coordFileName + else: + coordFileName = args.inputFileName + + ds = xarray.open_dataset(args.inFileName, mask_and_scale=False) + if 'nVertLevels' in ds.dims: + ds = ds.rename({'nVertLevels': 'depth'}) + + dsCoord = xarray.open_dataset(coordFileName, mask_and_scale=False) + dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) + + depth, depth_bnds = compute_depth(dsCoord.refBottomDepth) + ds.coords['depth'] = ('depth', depth) + ds.depth.attrs['long_name'] = 'reference depth of the center of ' \ + 'each vertical level' + ds.depth.attrs['standard_name'] = 'depth' + ds.depth.attrs['units'] = 'meters' + ds.depth.attrs['axis'] = 'Z' + ds.depth.attrs['positive'] = 'down' + ds.depth.attrs['valid_min'] = depth_bnds[0, 0] + ds.depth.attrs['valid_max'] = depth_bnds[-1, 1] + ds.depth.attrs['bounds'] = 'depth_bnds' + + ds.coords['depth_bnds'] = (('depth', 'nbnd'), depth_bnds) + ds.depth_bnds.attrs['long_name'] = 'Gridcell depth interfaces' + + for varName in ds.data_vars: + var = ds[varName] + if 'depth' in var.dims: + var = var.assign_coords(depth=ds.depth) + ds[varName] = var + + time = datetime.now().strftime('%c') + + history = '{}: {}'.format(time, ' '.join(sys.argv)) + + if 'history' in ds.attrs: + ds.attrs['history'] = '{}\n{}'.format(history, + ds.attrs['history']) + else: + ds.attrs['history'] = history + + write_netcdf(ds, args.outFileName) + + +if __name__ == '__main__': + 
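# Worked example of what compute_depth returns (numbers chosen only for
# illustration): for refBottomDepth = [10., 30., 60.] the bounds are
# depth_bnds = [[0, 10], [10, 30], [30, 60]] and depth = [5., 20., 45.],
# i.e. the positive-down midpoint of each layer.  Note also that the
# fallback when -c is omitted reads args.inputFileName, whereas the
# argparse dest defined above is "inFileName".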
main() diff --git a/ocean/add_depth_coord/add_zMid.py b/ocean/add_depth_coord/add_zMid.py new file mode 100755 index 000000000..b1c8d7ee1 --- /dev/null +++ b/ocean/add_depth_coord/add_zMid.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python + +""" +Add a 3D coordinate "zMid" to an MPAS-Ocean output file that defines the +positive-up vertical location of each cell center. +""" +# Authors +# ------- +# Xylar Asay-Davis + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import xarray +import numpy +import netCDF4 +import argparse +import sys +from datetime import datetime + + +def write_netcdf(ds, fileName): + ''' + Write an xarray data set to a NetCDF file making use of the _FillValue + attributes of each variable. This function should be used for data sets + opened with mask_and_scale=False. + + Parameters + ---------- + ds : xarray.Dataset object + The xarray data set to be written to a file + + fileName : str + The fileName to write the data set to + ''' + # Authors + # ------- + # Xylar Asay-Davis + + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + if '_FillValue' in ds[variableName].attrs: + encodingDict[variableName] = \ + {'_FillValue': ds[variableName].attrs['_FillValue']} + del ds[variableName].attrs['_FillValue'] + else: + encodingDict[variableName] = {'_FillValue': None} + + ds.to_netcdf(fileName, encoding=encodingDict) + + +def compute_zmid(bottomDepth, maxLevelCell, layerThickness): + """ + Computes zMid given data arrays for bottomDepth, maxLevelCell and + layerThickness + + Parameters + ---------- + bottomDepth : ``xarray.DataArray`` + the depth of the ocean bottom (positive) + + maxLevelCell : ``xarray.DataArray`` + the 1-based vertical index of the bottom of the ocean + + layerThickness : ``xarray.DataArray`` + the thickness of MPAS-Ocean layers (possibly as a function of time) + + Returns + ------- + zMid : ``xarray.DataArray`` + the vertical coordinate defining the middle of each layer, masked below + the bathymetry + """ + # Authors + # ------- + # Xylar Asay-Davis + + nDepth = layerThickness.sizes['depth'] + + vertIndex = \ + xarray.DataArray.from_dict({'dims': ('depth',), + 'data': numpy.arange(nDepth)}) + + layerThickness = layerThickness.where(vertIndex < maxLevelCell) + + thicknessSum = layerThickness.sum(dim='depth') + thicknessCumSum = layerThickness.cumsum(dim='depth') + zSurface = -bottomDepth+thicknessSum + + zLayerBot = zSurface - thicknessCumSum + + zMid = zLayerBot + 0.5*layerThickness + + zMid = zMid.where(vertIndex < maxLevelCell) + zMid = zMid.transpose('Time', 'nCells', 'depth') + + return zMid + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-c", "--coordFileName", dest="coordFileName", + type=str, required=False, + help="A MPAS-Ocean file with bottomDepth, maxLevelCell" + "and layerThickness but not zMid") + parser.add_argument("-i", "--inFileName", dest="inFileName", type=str, + required=True, + help="An input MPAS-Ocean file that zMid should be" + "added to, used for coords if another file is" + "not provided via -c.") + parser.add_argument("-o", "--outFileName", dest="outFileName", type=str, + required=True, + help="An output MPAS-Ocean file with zMid added") + args = parser.parse_args() + + if args.coordFileName: + coordFileName = args.coordFileName + else: + coordFileName = args.inputFileName + + ds = xarray.open_dataset(args.inFileName, 
mask_and_scale=False) + if 'nVertLevels' in ds.dims: + ds = ds.rename({'nVertLevels': 'depth'}) + + # dsCoord doesn't have masking disabled because we want it for zMid + dsCoord = xarray.open_dataset(coordFileName) + dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) + + ds.coords['zMid'] = compute_zmid(dsCoord.bottomDepth, + dsCoord.maxLevelCell, + dsCoord.layerThickness) + fillValue = netCDF4.default_fillvals['f8'] + ds.coords['zMid'] = ds.zMid.where(ds.zMid.notnull(), other=fillValue) + ds.zMid.attrs['units'] = 'meters' + ds.zMid.attrs['positive'] = 'up' + ds.zMid.attrs['_FillValue'] = fillValue + + for varName in ds.data_vars: + var = ds[varName] + if 'nCells' in var.dims and 'depth' in var.dims: + var = var.assign_coords(zMid=ds.zMid) + ds[varName] = var + + time = datetime.now().strftime('%c') + + history = '{}: {}'.format(time, ' '.join(sys.argv)) + + if 'history' in ds.attrs: + ds.attrs['history'] = '{}\n{}'.format(history, + ds.attrs['history']) + else: + ds.attrs['history'] = history + + write_netcdf(ds, args.outFileName) + + +if __name__ == '__main__': + main() diff --git a/ocean/add_depth_coord/write_time_varying_zMid.py b/ocean/add_depth_coord/write_time_varying_zMid.py new file mode 100755 index 000000000..777f5ea22 --- /dev/null +++ b/ocean/add_depth_coord/write_time_varying_zMid.py @@ -0,0 +1,159 @@ +#!/usr/bin/env python + +""" +Write a 3D coordinate "zMid" (optionally with a prefix such as +"timeMonthly_avg_") to an MPAS-Ocean output file. zMid defines the +positive-up vertical location of each cell center. +""" +# Authors +# ------- +# Xylar Asay-Davis + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import xarray +import numpy +import netCDF4 +import argparse +import sys +from datetime import datetime + + +def write_netcdf(ds, fileName): + ''' + Write an xarray data set to a NetCDF file making use of the _FillValue + attributes of each variable. This function should be used for data sets + opened with mask_and_scale=False. 
+ + Parameters + ---------- + ds : xarray.Dataset object + The xarray data set to be written to a file + + fileName : str + The fileName to write the data set to + ''' + # Authors + # ------- + # Xylar Asay-Davis + + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + if '_FillValue' in ds[variableName].attrs: + encodingDict[variableName] = \ + {'_FillValue': ds[variableName].attrs['_FillValue']} + del ds[variableName].attrs['_FillValue'] + else: + encodingDict[variableName] = {'_FillValue': None} + + ds.to_netcdf(fileName, encoding=encodingDict) + + +def compute_zmid(bottomDepth, maxLevelCell, layerThickness): + """ + Computes zMid given data arrays for bottomDepth, maxLevelCell and + layerThickness + + Parameters + ---------- + bottomDepth : ``xarray.DataArray`` + the depth of the ocean bottom (positive) + + maxLevelCell : ``xarray.DataArray`` + the 1-based vertical index of the bottom of the ocean + + layerThickness : ``xarray.DataArray`` + the thickness of MPAS-Ocean layers (possibly as a function of time) + + Returns + ------- + zMid : ``xarray.DataArray`` + the vertical coordinate defining the middle of each layer, masked below + the bathymetry + """ + # Authors + # ------- + # Xylar Asay-Davis + + nDepth = layerThickness.sizes['depth'] + + vertIndex = \ + xarray.DataArray.from_dict({'dims': ('depth',), + 'data': numpy.arange(nDepth)}) + + layerThickness = layerThickness.where(vertIndex < maxLevelCell) + + thicknessSum = layerThickness.sum(dim='depth') + thicknessCumSum = layerThickness.cumsum(dim='depth') + zSurface = -bottomDepth+thicknessSum + + zLayerBot = zSurface - thicknessCumSum + + zMid = zLayerBot + 0.5*layerThickness + + zMid = zMid.where(vertIndex < maxLevelCell) + zMid = zMid.transpose('Time', 'nCells', 'depth') + + return zMid + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-c", "--coordFileName", dest="coordFileName", + type=str, required=False, + help="A MPAS-Ocean file with bottomDepth and " + "maxLevelCell") + parser.add_argument("-i", "--inFileName", dest="inFileName", type=str, + required=True, + help="An input MPAS-Ocean file with some form of" + "layerThickness, and also bottomDepth and" + "maxLevelCell if no coordinate file is provided.") + parser.add_argument("-o", "--outFileName", dest="outFileName", type=str, + required=True, + help="An output MPAS-Ocean file with zMid for each" + "time in the input file") + parser.add_argument("-p", "--prefix", dest="prefix", type=str, + required=False, default="", + help="A prefix on layerThickness (in) and zMid (out)," + "such as 'timeMonthly_avg_'") + args = parser.parse_args() + + if args.coordFileName: + coordFileName = args.coordFileName + else: + coordFileName = args.inputFileName + + dsCoord = xarray.open_dataset(coordFileName) + dsCoord = dsCoord.rename({'nVertLevels': 'depth'}) + + dsIn = xarray.open_dataset(args.inFileName) + dsIn = dsIn.rename({'nVertLevels': 'depth'}) + inVarName = '{}layerThickness'.format(args.prefix) + outVarName = '{}zMid'.format(args.prefix) + layerThickness = dsIn[inVarName] + + zMid = compute_zmid(dsCoord.bottomDepth, dsCoord.maxLevelCell, + layerThickness) + + dsOut = xarray.Dataset() + dsOut[outVarName] = zMid + fillValue = netCDF4.default_fillvals['f8'] + dsOut[outVarName] = dsOut[outVarName].where(dsOut[outVarName].notnull(), + other=fillValue) + dsOut[outVarName].attrs['units'] = 'meters' + 
dsOut[outVarName].attrs['positive'] = 'up' + dsOut[outVarName].attrs['_FillValue'] = fillValue + + time = datetime.now().strftime('%c') + + history = '{}: {}'.format(time, ' '.join(sys.argv)) + dsOut.attrs['history'] = history + + write_netcdf(dsOut, args.outFileName) + + +if __name__ == '__main__': + main() diff --git a/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py b/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py new file mode 100755 index 000000000..76286b7a5 --- /dev/null +++ b/ocean/coastline_alteration/add_critical_land_blockages_to_mask.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +""" +Name: add_critical_land_blockages_to_mask.py +Author: Xylar Asay-Davis + +Add transects that identify critical regions where narrow strips of land block +ocean flow. These are, essentially, the opposite of critical passages, which +must remain open for ocean flow. +""" + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import xarray +import argparse + +from mpas_tools.ocean.coastline_alteration import add_critical_land_blockages + + +if __name__ == '__main__': + parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-f", "--input_mask_file", dest="input_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="INPUTMASKFILE", required=True) + parser.add_argument("-o", "--output_mask_file", + dest="output_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="OUTPUTMASKFILE", required=True) + parser.add_argument("-b", "--blockage_file", dest="blockage_file", + help="Masks for each transect identifying critical " + "land blockage.", metavar="BLOCKFILE", + required=True) + args = parser.parse_args() + + dsMask = xarray.open_dataset(args.input_mask_filename) + + dsBlockages = xarray.open_dataset(args.blockage_file) + + dsMask = add_critical_land_blockages(dsMask, dsBlockages) + dsMask.to_netcdf(args.output_mask_filename) diff --git a/ocean/coastline_alteration/add_land_locked_cells_to_mask.py b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py new file mode 100755 index 000000000..799d211fe --- /dev/null +++ b/ocean/coastline_alteration/add_land_locked_cells_to_mask.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +""" +Name: add_land_locked_cells_to_mask.py +Author: Mark Petersen, Adrian Turner, Xylar Asay-Davis + +Find ocean cells that are land-locked, and alter the cell +mask so that they are counted as land cells. 
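+
+A typical invocation (the file names are placeholders) might be:
+
+    ./add_land_locked_cells_to_mask.py -f ocean_mask.nc -m mesh.nc \
+        -o ocean_mask_land_locked.nc -l 43.0 -n 10
+
+using the default latitude threshold (-l, in degrees) and maximum number of
+sweeps (-n).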
+""" +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import argparse +import xarray + +from mpas_tools.ocean.coastline_alteration import add_land_locked_cells_to_mask + +if __name__ == '__main__': + parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-f", "--input_mask_file", dest="input_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="INPUTMASKFILE", required=True) + parser.add_argument("-o", "--output_mask_file", + dest="output_mask_filename", + help="Mask file that includes cell and edge masks.", + metavar="OUTPUTMASKFILE", required=True) + parser.add_argument("-m", "--mesh_file", dest="mesh_filename", + help="MPAS Mesh filename.", metavar="MESHFILE", + required=True) + parser.add_argument("-l", "--latitude_threshold", + dest="latitude_threshold", + help="Minimum latitude, in degrees, for transect " + "widening.", + required=False, type=float, default=43.0) + parser.add_argument("-n", "--number_sweeps", dest="nSweeps", + help="Maximum number of sweeps to search for " + "land-locked cells.", + required=False, type=int, default=10) + args = parser.parse_args() + + dsMask = xarray.open_dataset(args.input_mask_filename) + + dsMesh = xarray.open_dataset(args.mesh_filename) + + dsMask = add_land_locked_cells_to_mask(dsMask, dsMesh, + args.latitude_threshold, + args.nSweeps) + dsMask.to_netcdf(args.output_mask_filename) diff --git a/ocean/coastline_alteration/mpas_tools b/ocean/coastline_alteration/mpas_tools new file mode 120000 index 000000000..627733f3b --- /dev/null +++ b/ocean/coastline_alteration/mpas_tools @@ -0,0 +1 @@ +../../conda_package/mpas_tools/ \ No newline at end of file diff --git a/ocean/coastline_alteration/widen_transect_edge_masks.py b/ocean/coastline_alteration/widen_transect_edge_masks.py new file mode 100755 index 000000000..1d4bc067c --- /dev/null +++ b/ocean/coastline_alteration/widen_transect_edge_masks.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +""" +Name: widen_transect_edge_masks.py +Author: Mark Petersen, Xylar Asay-Davis + +Alter transects to be at least two cells wide. This is used for critical +passages, to avoid sea ice blockage. Specifically, mark cells on both sides +of each transect edge mask as a water cell. 
+""" +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import argparse +import xarray + +from mpas_tools.ocean.coastline_alteration import widen_transect_edge_masks + + +if __name__ == '__main__': + + parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-f", "--mask_file", dest="mask_filename", + help="Mask file with cell and edge transect masks.", + metavar="MASKFILE", + required=True) + parser.add_argument("-m", "--mesh_file", dest="mesh_filename", + help="MPAS Mesh filename.", metavar="MESHFILE", + required=True) + parser.add_argument("-o", "--out_file", dest="out_filename", + help="Output mask file,different from input filename.", + metavar="MASKFILE", + required=True) + parser.add_argument("-l", "--latitude_threshold", + dest="latitude_threshold", + help="Minimum latitude, degrees, for transect " + "widening.", + required=False, type=float, default=43.0) + args = parser.parse_args() + + dsMask = xarray.open_dataset(args.mask_filename) + + dsMesh = xarray.open_dataset(args.mesh_filename) + + dsMask = widen_transect_edge_masks(dsMask, dsMesh, args.latitude_threshold) + dsMask.to_netcdf(args.out_filename) diff --git a/grid_gen/initial_condition_interpolation/interpolate_state.py b/ocean/initial_condition_interpolation/interpolate_state.py similarity index 100% rename from grid_gen/initial_condition_interpolation/interpolate_state.py rename to ocean/initial_condition_interpolation/interpolate_state.py diff --git a/python_scripts/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py b/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py similarity index 59% rename from python_scripts/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py rename to ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py index 5c89e59b2..99158622f 100755 --- a/python_scripts/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py +++ b/ocean/moc_southern_boundary_extractor/moc_southern_boundary_extractor.py @@ -9,96 +9,71 @@ boundary transect data on cells is not foreseen. Author: Xylar Asay-Davis -last modified: 11/02/2016 +last modified: 5/22/2018 ''' +from __future__ import absolute_import, division, print_function, \ + unicode_literals import xarray import argparse import numpy -def extractSouthernBounary(mesh, moc, latBuffer): - # Extrcts the southern boundary of each region mask in moc. Mesh info - # is taken from mesh. latBuffer is a number of radians above the southern- - # most point that should be considered to definitely be in the southern - # boundary. - - def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): - # Follows the boundary from a starting edge to produce a sequence of - # edges that form a closed loop. - # - # startEdge is an edge on the boundary that will be both the start and - # end of the loop. - # - # isBoundaryEdge is a mask that indicates which edges are on the - # boundary - # - # returns lists of edges, edge signs and vertices - - boundaryEdgesOnEdge = -numpy.ones((nEdges, 2), int) - - boundaryEdges = numpy.arange(nEdges)[isBoundaryEdge] - nBoundaryEdges = len(boundaryEdges) - - # Find the edges on vertex of the vertices on each boundary edge. - # Each boundary edge must have valid vertices, so none should be out - # of bounds. 
- edgesOnVerticesOnBoundaryEdge = \ - edgesOnVertex[verticesOnEdge[boundaryEdges, :], :] - - # The (typically 3) edges on each vertex of a boundary edge - # will be the edge itself, another boundary edge and 1 or more - # non-boundary edges. We want only the other boundary edge - - # other edge not be this edge - mask = numpy.not_equal(edgesOnVerticesOnBoundaryEdge, - boundaryEdges.reshape((nBoundaryEdges, 1, 1))) - - # other edge must be in range - mask = numpy.logical_and(mask, edgesOnVerticesOnBoundaryEdge >= 0) - mask = numpy.logical_and(mask, edgesOnVerticesOnBoundaryEdge < nEdges) - - # other edge must be a boundary edge - otherEdgeMask = mask.copy() - otherEdgeMask[mask] = \ - isBoundaryEdge[edgesOnVerticesOnBoundaryEdge[mask]] - - # otherEdgeMask should have exactly one non-zero entry per vertex - assert(numpy.all(numpy.equal(numpy.sum(numpy.array(otherEdgeMask, int), - axis=2), 1))) - - (edgeIndices, voeIndices, eovIndices) = numpy.nonzero(otherEdgeMask) - - boundaryEdgesOnEdge = -numpy.ones((nEdges, 2), int) - boundaryEdgesOnEdge[boundaryEdges[edgeIndices], voeIndices] = \ - edgesOnVerticesOnBoundaryEdge[edgeIndices, voeIndices, eovIndices] - - iEdge = startEdge - edgeSequence = [] - edgeSigns = [] - vertexSequence = [] - signs = (1, -1) - vertexOnEdgeIndex = 1 - nextEdge = boundaryEdgesOnEdge[iEdge, vertexOnEdgeIndex] - while True: - edgeSequence.append(iEdge) - edgeSigns.append(signs[vertexOnEdgeIndex]) - vertexSequence.append(verticesOnEdge[iEdge, vertexOnEdgeIndex]) - - # a trick to determine which is the next vertex and edge to follow - vertexOnEdgeIndex = int(boundaryEdgesOnEdge[nextEdge, 0] == iEdge) - - iEdge = nextEdge - nextEdge = boundaryEdgesOnEdge[nextEdge, vertexOnEdgeIndex] - if iEdge == startEdge: +def getEdgeSequenceOnBoundary(startEdge, edgeSign, edgesOnVertex, + verticesOnEdge): + # Follows the boundary from a starting edge to produce a sequence of + # edges that form a closed loop. + # + # startEdge is an edge on the boundary that will be both the start and + # end of the loop. + # + # isBoundaryEdge is a mask that indicates which edges are on the + # boundary + # + # returns lists of edges, edge signs and vertices + + iEdge = startEdge + edgeSequence = [] + vertexSequence = [] + while(True): + assert(edgeSign[iEdge] == 1. or edgeSign[iEdge] == -1.) + if edgeSign[iEdge] == 1.: + v = 0 + else: + v = 1 + iVertex = verticesOnEdge[iEdge, v] + + eov = edgesOnVertex[iVertex, :] + + # find the edge that is not iEdge but is on the boundary + nextEdge = -1 + for edge in eov: + if edge != iEdge and edgeSign[edge] != 0: + nextEdge = edge break + assert(nextEdge != -1) + + edgeSequence.append(iEdge) + vertexSequence.append(iVertex) + + iEdge = nextEdge + + if iEdge == startEdge: + break + + edgeSequence = numpy.array(edgeSequence) + edgeSequenceSigns = edgeSign[edgeSequence] + vertexSequence = numpy.array(vertexSequence) - edgeSequence = numpy.array(edgeSequence) - edgeSigns = numpy.array(edgeSigns) - vertexSequence = numpy.array(vertexSequence) + return (edgeSequence, edgeSequenceSigns, vertexSequence) - return (edgeSequence, edgeSigns, vertexSequence) + +def extractSouthernBounary(mesh, mocMask, latBuffer): + # Extrcts the southern boundary of each region mask in mocMask. Mesh info + # is taken from mesh. latBuffer is a number of radians above the southern- + # most point that should be considered to definitely be in the southern + # boundary. 
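+    # In outline, the loop over regions below:
+    #  1. marks the edges where exactly one of the two neighboring cells is
+    #     inside the region, assigning each such edge a +/-1 sign that
+    #     records its orientation relative to the region,
+    #  2. starts from the southernmost of those edges and follows the closed
+    #     loop of boundary edges with getEdgeSequenceOnBoundary(), and
+    #  3. keeps the longest contiguous stretch of that loop lying within
+    #     latBuffer of the southernmost latitude as the southern-boundary
+    #     transect.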
southernBoundaryEdges = [] southernBounderyEdgeSigns = [] @@ -106,8 +81,8 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): nCells = mesh.dims['nCells'] nEdges = mesh.dims['nEdges'] - nRegions = moc.dims['nRegions'] - assert(moc.dims['nCells'] == nCells) + nRegions = mocMask.dims['nRegions'] + assert(mocMask.dims['nCells'] == nCells) # convert to python zero-based indices cellsOnEdge = mesh.variables['cellsOnEdge'].values-1 @@ -124,7 +99,9 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): southernBoundaryVertices = [] for iRegion in range(nRegions): - cellMask = moc.variables['regionCellMasks'][:, iRegion].values + name = mocMask.regionNames[iRegion].values.astype('U') + print(name) + cellMask = mocMask.variables['regionCellMasks'][:, iRegion].values # land cells are outside not in the MOC region cellsOnEdgeMask = numpy.zeros(cellsOnEdge.shape, bool) @@ -132,19 +109,34 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): cellsOnEdgeMask[cellsOnEdgeInRange] = \ cellMask[cellsOnEdge[cellsOnEdgeInRange]] == 1 - isMOCBoundaryEdge = (cellsOnEdgeMask[:, 0] != cellsOnEdgeMask[:, 1]) + print(' computing edge sign...') + edgeSign = numpy.zeros(nEdges) + # positive sign if the first cell on edge is in the region + mask = numpy.logical_and(cellsOnEdgeMask[:, 0], + numpy.logical_not(cellsOnEdgeMask[:, 1])) + edgeSign[mask] = -1. + # negative sign if the second cell on edge is in the region + mask = numpy.logical_and(cellsOnEdgeMask[:, 1], + numpy.logical_not(cellsOnEdgeMask[:, 0])) + edgeSign[mask] = 1. + isMOCBoundaryEdge = edgeSign != 0. edgesMOCBoundary = numpy.arange(nEdges)[isMOCBoundaryEdge] + print(' done.') startEdge = numpy.argmin(latEdge[isMOCBoundaryEdge]) startEdge = edgesMOCBoundary[startEdge] minLat = latEdge[startEdge] + print(' getting edge sequence...') # follow the boundary from this point to get a loop of edges # Note: it is possible but unlikely that the southern-most point is # not within bulk region of the MOC mask if the region is not a single # shape - edgeSequence, edgeSigns, vertexSequence = \ - getEdgeSequenceOnBoundary(startEdge, isMOCBoundaryEdge) + edgeSequence, edgeSequenceSigns, vertexSequence = \ + getEdgeSequenceOnBoundary(startEdge, edgeSign, edgesOnVertex, + verticesOnEdge) + + print(' done: {} edges in transect.'.format(len(edgeSequence))) aboveSouthernBoundary = latEdge[edgeSequence] > minLat + latBuffer @@ -168,7 +160,7 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): if len(startIndices) == 0: # the whole sequence is the southern boundary southernBoundaryEdges.append(edgeSequence) - southernBounderyEdgeSigns.append(edgeSigns) + southernBounderyEdgeSigns.append(edgeSequenceSigns) southernBoundaryVertices.append(vertexSequence) continue @@ -183,7 +175,7 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): indices = numpy.mod(indices, len(edgeSequence)) southernBoundaryEdges.append(edgeSequence[indices]) - southernBounderyEdgeSigns.append(edgeSigns[indices]) + southernBounderyEdgeSigns.append(edgeSequenceSigns[indices]) # we want one extra vertex in the vertex sequence indices = numpy.arange(endIndices[longest], @@ -196,11 +188,11 @@ def getEdgeSequenceOnBoundary(startEdge, isBoundaryEdge): southernBoundaryVertices) -def addTransectsToMOC(mesh, moc, southernBoundaryEdges, +def addTransectsToMOC(mesh, mocMask, southernBoundaryEdges, southernBounderyEdgeSigns, southernBoundaryVertices): - # Creates transect fields in moc from the edges, edge signs and vertices - # defining the southern boundaries. 
Mesh info (nEdges and nVertices) is - # taken from the mesh file. + # Creates transect fields in mocMask from the edges, edge signs and + # vertices defining the southern boundaries. Mesh info (nEdges and + # nVertices) is taken from the mesh file. nTransects = len(southernBoundaryEdges) @@ -241,16 +233,29 @@ def addTransectsToMOC(mesh, moc, southernBoundaryEdges, transectVertexGlobalIDs[iTransect, 0:transectCount] \ = southernBoundaryVertices[iTransect] + 1 - moc['transectEdgeMasks'] = (('nEdges', 'nTransects'), transectEdgeMasks) - moc['transectEdgeMaskSigns'] = (('nEdges', 'nTransects'), - transectEdgeMaskSigns) - moc['transectEdgeGlobalIDs'] = (('nTransects', 'maxEdgesInTransect'), - transectEdgeGlobalIDs) + mocMask['transectEdgeMasks'] = \ + (('nEdges', 'nTransects'), transectEdgeMasks) + mocMask['transectEdgeMaskSigns'] = (('nEdges', 'nTransects'), + transectEdgeMaskSigns) + mocMask['transectEdgeGlobalIDs'] = (('nTransects', 'maxEdgesInTransect'), + transectEdgeGlobalIDs) + + mocMask['transectVertexMasks'] = (('nVertices', 'nTransects'), + transectVertexMasks) + mocMask['transectVertexGlobalIDs'] = \ + (('nTransects', 'maxVerticesInTransect'), transectVertexGlobalIDs) + + mocMask['transectNames'] = mocMask.regionNames.rename( + {'nRegions': 'nTransects'}) + + mocMask['nTransectsInGroup'] = mocMask.nRegionsInGroup.rename( + {'nRegionGroups': 'nTransectGroups'}) + + mocMask['transectsInGroup'] = mocMask.regionsInGroup.rename( + {'nRegionGroups': 'nTransectGroups', 'maxRegionsInGroup': 'maxTransectsInGroup'}) - moc['transectVertexMasks'] = (('nVertices', 'nTransects'), - transectVertexMasks) - moc['transectVertexGlobalIDs'] = (('nTransects', 'maxVerticesInTransect'), - transectVertexGlobalIDs) + mocMask['transectGroupNames'] = mocMask.regionGroupNames.rename( + {'nRegionGroups': 'nTransectGroups'}) if __name__ == "__main__": @@ -270,15 +275,15 @@ def addTransectsToMOC(mesh, moc, southernBoundaryEdges, required=True) args = parser.parse_args() - moc = xarray.open_dataset(args.in_file) + mocMask = xarray.open_dataset(args.in_file) mesh = xarray.open_dataset(args.mesh_file) southernBoundaryEdges, southernBounderyEdgeSigns, \ southernBoundaryVertices = \ - extractSouthernBounary(mesh, moc, latBuffer=3.*numpy.pi/180.) + extractSouthernBounary(mesh, mocMask, latBuffer=3.*numpy.pi/180.) - addTransectsToMOC(mesh, moc, southernBoundaryEdges, + addTransectsToMOC(mesh, mocMask, southernBoundaryEdges, southernBounderyEdgeSigns, southernBoundaryVertices) - moc.to_netcdf(args.out_file) + mocMask.to_netcdf(args.out_file) diff --git a/ocean/performance_testing/README b/ocean/performance_testing/README new file mode 100644 index 000000000..a0053df03 --- /dev/null +++ b/ocean/performance_testing/README @@ -0,0 +1,23 @@ +This automated performance testing suite was created by Divya Jaganathan for +the Parallel Computing Summer Research Institute, 2018, with guidance from Mark +Petersen, LANL. + +To run a performance test, start in any MPAS-Ocean run directory and add links to: +- ocean_model +- metis +- from this directory: + submit_performance_test_to_queue.py + performance_test.py + plot_from_files.py + +There are three ways to run this testing suite: + +1. From a front end node, this command will submit a single job to the queue, of the size required. +./submit_performance_test_to_queue.py -M 128 -m 16 -r EC60to30 + +2. From a compute node, run this directly +./performance_test.py + +3. The above steps creates plots with just those tests. 
After running several jobs, + the data can be combined into a single plot with plot_from_files.py. This file is case specific, and + requires the user to alter lines by hand. diff --git a/ocean/performance_testing/performance_test.py b/ocean/performance_testing/performance_test.py new file mode 100755 index 000000000..e4b6984fd --- /dev/null +++ b/ocean/performance_testing/performance_test.py @@ -0,0 +1,170 @@ +#!/usr/bin/env python + +""" + Name: performance_test.py + Author: Divya Jaganathan + Date: 6 July, 2018 + +This script is automatically called by submit_performance_test_to_queue.py to run a batch job to get performance plots and data + +This script can also be used for an interactive job submission using the following command format: + +command format (to run an interactive job) : +./performance_test.py + +Access files required to run this script: + 1. namelist.ocean + 2. graph.info + 3. metis file (rename gpmetis to metis or vice-versa in this script when creating a soft link) + 4. ocean_model (executable file) + +NOTE: When running a large number of tasks (>10k), check the name of log.ocean.0000.out file generated - no. of zeros in the file name changes +""" + +import subprocess +import numpy as np +import re +import sys +import datetime +from time import strftime +import matplotlib +matplotlib.use('Agg') +import matplotlib.pyplot as plt +import os +import shlex + +# Setting OMP variables for NO multithreading + +os.environ['OMP_NUM_THREADS'] = '1' +os.environ['OMP_PLACES'] = 'threads' + +timenow = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + +# Read namelist to obtain the run duration used in calculating SYPD +time_fr = open("namelist.ocean", 'r') + +for line in time_fr: + m1 = re.search("config_run_duration", line) + if m1: + parts = line.split("=", 1)[1] + subparts = re.split(':|\'|_', parts) + timeparts = subparts[2:5] + dateparts = re.split('-', subparts[1]) + if len(dateparts) == 1: + simulated_time_in_sec = int( + timeparts[0]) * 60 * 60 + int(timeparts[1]) * 60 + int(timeparts[2]) + else: + simulated_time_in_sec = int(dateparts[2]) * 24 * 60 * 60 + int( + timeparts[0]) * 60 * 60 + int(timeparts[1]) * 60 + int(timeparts[2]) + +time_fr.close() + +subprocess.check_call(['mkdir', '-p', 'data_performance']) +subprocess.check_call(['mkdir', '-p', 'figures_performance']) + +# Store the details on number of cells for this resolution + +cells_fr = open("graph.info", 'r') +cells = str(cells_fr.readline().split(" ")[0]) +cells_fr.close() + +nprocs_max = int(sys.argv[1]) +nprocs_min = int(sys.argv[2]) +machine_long_name = sys.argv[3] +cores_per_node = float(sys.argv[4]) +resolution_name = sys.argv[5] + +# plane_size is used to define the plane_distribution flag in srun +plane_size = str(int(cores_per_node)) + +# Performance data evaluation begins here +niter = int(np.log2(nprocs_max)) - int(np.log2(nprocs_min)) + 1 +nsamples_per_procnum = 3 + +time = np.zeros(shape=(1, niter)) +procs = np.zeros(shape=(1, niter)) +SYPD = np.zeros(shape=(1, niter)) + +i = nprocs_max +j = niter - 1 + +writefilename = "data_performance/" + machine_long_name + "_" + \ + str(nprocs_max) + "_" + timenow + ".txt" +fw = open(writefilename, 'a+') +fw.write( + 'Time: %s \nMachine: %s\nResolution: %s\nHorizontal cells: %s\nRun duration: %s\nRun time in sec: %d\nFormat: #Procs|Sample Runs|Average|SYPD \n' % + (timenow, + machine_long_name, + resolution_name, + cells, + timeparts, + simulated_time_in_sec)) +fw.flush() + +while i >= nprocs_min: + + local_N = int(np.ceil(i / cores_per_node)) + foldername = 
"perf_p" + str(i) + "_gr_openmpi" + subprocess.check_call(['rm', '-rf', foldername]) + subprocess.check_call(['mkdir', foldername]) + fw.write('%s \t' % i) + sum = 0 + + # Generate the log and graph files + subprocess.check_call(['./metis', 'graph.info', str(i)]) + print "metis" + str(i) + "completed" + + for sample in range(nsamples_per_procnum): + subprocess.check_call( + ['rm', '-rf', 'log*', 'analysis_members', 'output.nc']) + args = ['srun', + '-N', + str(local_N), + '-n', + str(i), + '--cpu_bind=verbose,core', + '--distribution=plane=%s' % plane_size, + './ocean_model'] + print "running", ' '.join(args) + subprocess.check_call(args) + + # Search for time integration and write to a file + fr = open("log.ocean.0000.out", 'r') + for line in fr: + m = re.search("2 time integration", line) + if m: + numbers = line.split("integration", 1)[1] + first_number = numbers.split()[0] + fw.write('%s \t' % first_number) + sum = sum + float(first_number) + fr.close() + fname = "log_p" + str(i) + "_s" + str(sample + 1) + filepath = foldername + "/" + fname + subprocess.check_call(['mv', 'log.ocean.0000.out', filepath]) + + average = sum / nsamples_per_procnum + time[0][j] = average + procs[0][j] = i + SYPD[0][j] = simulated_time_in_sec / (365 * average) + fw.write('%s \t %s\n' % (str(average), str(SYPD[0][j]))) + fw.flush() + perfect = SYPD[0][j] / procs[0][j] * procs + + # create plot with data so far + plt.clf() + plt.loglog(procs[0][j:], SYPD[0][j:], '-or', + label=resolution_name + ', ' + machine_long_name) + plt.loglog(procs[0][j:], perfect[0][j:], '--k', label='perfect scaling') + plt.title('MPAS-Ocean Performance Curve') + plt.xlabel('Number of MPI ranks') + plt.ylabel('Simulated Years Per Day (SYPD)') + plt.legend(loc='upper left') + plt.grid(which='major') + plt.xlim((procs[0][j] / 2.0, nprocs_max * 2.0)) + plt.tight_layout() + figurenamepath = "figures_performance/" + resolution_name + '_' + \ + machine_long_name + '_' + str(nprocs_max) + "_" + timenow + ".png" + plt.savefig(figurenamepath) + i = i / 2 + j = j - 1 +fw.close() diff --git a/ocean/performance_testing/plot_from_files.py b/ocean/performance_testing/plot_from_files.py new file mode 100755 index 000000000..b2998315e --- /dev/null +++ b/ocean/performance_testing/plot_from_files.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python + +""" +Name: plot_from_files.py +Author: Divya Jaganathan +Date: 26 July, 2018 + +Plots a single plot of different performance curves from different performance_data text files in a folder + +""" + +import glob +import matplotlib +matplotlib.use('agg') +import numpy as np +import matplotlib.pyplot as plt +import subprocess + +path = "/lustre/scratch2/turquoise/divjag2005/case_runs/performance_results_4096/*.txt" + +files = glob.glob(path) +files = [files[4], files[0], files[1], files[3], files[2]] +num_files = len(files) +print(num_files) + +no_res_in_a_file = 9 +file_counter = 0 + +array_x = np.zeros(shape=(num_files, no_res_in_a_file)) +array_y = np.zeros(shape=(num_files, no_res_in_a_file)) + +colors = ["g", "k", "m", "r", "b"] +labels = [ + "uniform 60km", + "variable 60to30km", + "uniform 30km", + "variable 60to15km", + "uniform 15km"] + + +for file in files: + + f = open(file, 'r') + ob = f.read().split('\n') + num_lines = len(ob) - 1 + line_counter = 6 + i = 0 + rank_column = 0 + SYPD_column = 7 + + while line_counter < num_lines: + array_x[file_counter][i] = ob[line_counter].split('\t')[rank_column] + array_y[file_counter][i] = ob[line_counter].split('\t')[SYPD_column] + line_counter = line_counter + 1 + i 
= i + 1 + + font = {'weight': 'bold', + 'size': '14'} + + matplotlib.rc('font', **font) + plt.loglog(array_x[file_counter][0:i - + 1], array_y[file_counter][0:i - + 1], '-o', color=colors[file_counter], label="%s" % + labels[file_counter]) + perfect = (array_y[file_counter][i - 1] / + array_x[file_counter][i - 1]) * array_x[file_counter][0:i - 1] + plt.loglog( + array_x[file_counter][0:i - 1], + perfect, + '--', + color=colors[file_counter]) + file_counter = file_counter + 1 + f.close() + +plt.xlabel('Number of MPI ranks', fontsize=14, weight='bold') +plt.ylabel('SYPD', fontsize=14, weight='bold') +plt.title(' 36 Core Broadwell (No HT)', fontsize=14, weight='bold') +plt.xlim((10, 10000)) +plt.ylim((0.05, 4000)) +plt.tight_layout() +plt.grid() +plt.legend(title='resolution', loc='upper left') +plt.savefig('result.png') diff --git a/ocean/performance_testing/submit_performance_test_to_queue.py b/ocean/performance_testing/submit_performance_test_to_queue.py new file mode 100755 index 000000000..ce641f05f --- /dev/null +++ b/ocean/performance_testing/submit_performance_test_to_queue.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python + +""" +Name: submit_performance_test_to_queue.py +Author: Divya Jaganathan +Date: July 6, 2018 + +Submits request for a batch job to carry out successive performance runs starting from maximum +number of tasks. Load modules before calling this script. + +command format: +./submit_performance_test_to_queue.py -M -m -n -r + +Examples: + +On any machine, you can grab tarred run directories here: +https://zenodo.org/record/1252437#.W5FIppNKjUI +add a link to +- metis +- ocean_model executable +- submit_performance_test_to_queue.py (here) +- performance_test.py (here) + +On any machine log-in node, all you need is: + ./submit_performance_test_to_queue.py +This will submit a single job to the queue, and produce the default test of +64 through 2 by powers of 2, and auto-detect your machine. Load modules before +calling this script, and submission will keep the same modules. + +Or, one can specify everything with flags. This tests 128 to 16 cores by powers of two. + ./submit_performance_test_to_queue.py -M 128 -m 16 -r EC60to30 + +On cori, you have to specify cori-knl or cori-haswell, as follows: + ./submit_performance_test_to_queue.py -M 128 -m 16 -r EC60to30 -n cori-knl + +After the job completes, you will find data and auto-generated plots in these directories: + data_performance + figures_performance +""" + +import subprocess +import argparse +import shlex +import numpy as np +import os + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument( + "-M", + "--max_tasks", + dest="max_tasks", + help="Maximum number of tasks, defaults to 64.", + default=64) +parser.add_argument( + "-m", + "--min_tasks", + dest="min_tasks", + help="Minimum number of tasks, defaults to 2.", + default=2) +parser.add_argument( + "-n", + "--machine_name", + dest="machine_name", + help="This script auto-detects the machine from the node name (e.g. 'gr' for grizzly). Use this flag to override. 
On cori, enter cori-haswell or cori-knl", + default=os.uname()[1][0:2]) +parser.add_argument( + "-r", + "--resolution_name", + dest="resolution_name", + help="This label appears on the title of the plot.", + default="MPAS-O") +args = parser.parse_args() + +max_tasks = int(args.max_tasks) +min_tasks = int(args.min_tasks) +machine_name = args.machine_name +resolution_name = args.resolution_name + +job_id = "MPASO_perf_P" + str(max_tasks) + args.resolution_name +output_name = "slurm_" + job_id + ".out" + +# NODES_REQUIRED to request for resources is calculated assuming no hyperthreads. +# Changes to this can be implemented by changing cores_per_node specific +# to the machine + +if machine_name == 'gr': + machine_long_name = 'grizzly' + cores_per_node = 36.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + if NODES_REQUIRED < 70: + qos = "interactive" + else: + qos = "standard" + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'wf': + machine_long_name = 'wolf' + cores_per_node = 16.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + if NODES_REQUIRED < 70: + qos = "interactive" + else: + qos = "standard" + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'ba': + machine_long_name = 'badger' + cores_per_node = 36.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + if NODES_REQUIRED < 70: + qos = "interactive" + else: + qos = "standard" + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'cori-haswell': + machine_long_name = 'cori-haswell' + cores_per_node = 32.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d -C haswell --qos=regular -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'cori-knl': + machine_long_name = 'cori-knl' + cores_per_node = 68.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d -C knl --qos=regular -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name == 'ed': + machine_long_name = 'edison' + cores_per_node = 24.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d --qos=debug -J %s -o %s 'performance_test.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +elif machine_name[0:5] == 'theta': + machine_long_name = 'theta' + cores_per_node = 64.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "qsub -n %d --jobname=%s -O %s 'performance_test.py' %d %d %s %d %s" % ( + NODES_REQUIRED, job_id, output_name, max_tasks, min_tasks, machine_long_name, cores_per_node, resolution_name) +else: + print "Invalid machine or 
have not mentioned haswell or knl on Cori" + + +print "running: ", runcommand +s_args = shlex.split(runcommand) +subprocess.check_call(s_args) diff --git a/ocean/performance_testing/weights/call_to_performance_testing.py b/ocean/performance_testing/weights/call_to_performance_testing.py new file mode 100644 index 000000000..40bd14fd2 --- /dev/null +++ b/ocean/performance_testing/weights/call_to_performance_testing.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python + +""" +Name: call_to_performance_testing.py +Author: Divya Jaganathan +Date: July 6, 2018 + +Submits request for a batch job to carry out successive performance runs starting from maximum number of tasks + +command format: python call_to_performance_testing.py -c -M -m -r -x + + +""" +import subprocess +import argparse +import shlex +import numpy as np +import os + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument( + "-c", + "--cpu_type", + dest="cpu_type", + help="If cori, enter cori-haswell/cori-knl", + default=os.uname()[1][0:2]) +parser.add_argument( + "-M", + "--max_tasks", + dest="max_tasks", + help="Maximum number of tasks", + required=True) +parser.add_argument( + "-m", + "--min_tasks", + dest="min_tasks", + help="Minimum number of tasks", + default=2) +parser.add_argument( + "-r", + "--resolution", + dest="resolution", + help="Resolution ", + default="QU") +parser.add_argument( + "-x", + "--max_vertex_weight", + dest="max_vertex_weight", + help="Maximum Vertex Weight, x", + default="0.0") + +args = parser.parse_args() + +cpu_type = args.cpu_type +max_tasks = int(args.max_tasks) +min_tasks = int(args.min_tasks) +res = args.resolution +x = args.max_vertex_weight + +job_id = res + "_perf_" + str(max_tasks) +output_name = "slurm_" + job_id + ".out" + + +# NODES_REQUIRED to request for resources is calculated assuming no hyperthreads. 
+# Changes to this can be implemented by changing cores_per_node specific +# to the machine + +if cpu_type == 'gr': + cores_per_node = 36.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + if NODES_REQUIRED < 70: + qos = "interactive" + else: + qos = "standard" + runcommand = "sbatch -N %d -n %d --qos=%s -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, qos, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node, x) +elif cpu_type == 'cori-haswell': + cores_per_node = 32.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d -C haswell --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node, x) +elif cpu_type == 'cori-knl': + cores_per_node = 68.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d -C knl --qos=regular -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node, x) +elif cpu_type == 'ed': + cores_per_node = 24.0 + NODES_REQUIRED = int(np.ceil(max_tasks / cores_per_node)) + runcommand = "sbatch -N %d -n %d --qos=debug -J %s -o %s 'performance_testing.py' %d %d %s %d %s" % ( + NODES_REQUIRED, max_tasks, job_id, output_name, max_tasks, min_tasks, cpu_type, cores_per_node, x) +else: + print "Invalid machine or have not mentioned haswell or knl on Cori" + + +s_args = shlex.split(runcommand) +print "running", ''.join(s_args) + +subprocess.check_call(s_args) diff --git a/ocean/performance_testing/weights/generate_graph.info_with_wgts.py b/ocean/performance_testing/weights/generate_graph.info_with_wgts.py new file mode 100644 index 000000000..912aeac4d --- /dev/null +++ b/ocean/performance_testing/weights/generate_graph.info_with_wgts.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python + +""" +Name: generate_graph.info_with_wgts.py +Author: Divya Jaganathan +Date: 17 July, 2018 + +Assigns vertex weight to each horizontal cell in graph.info (in gpmetis format) +Reads: , +Writes: graph.info_with_wgts_ + +Flags(s) in call-command: + -x or --vertex_weight=, default=0.0 + -d or --data_file=, default=init.nc + -g or --graph_file=, default=graph.info + +""" + +import numpy as np +import netCDF4 as nc4 +from netCDF4 import MFDataset +import argparse + +parser = \ + argparse.ArgumentParser(description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) +parser.add_argument( + "-x", + "--vertex_weight", + dest="vertex_weight", + help="Exponent factor in the weighing function defining dependence on depth (maxLevelCell)", + default=0.0) + +parser.add_argument( + "-d", + "--data_file", + dest="data_filename", + help="File containing the maxLevelCell data (Default: init.nc)", + default="init.nc") + +parser.add_argument( + "-g", + "--graph_file", + dest="graph_filename", + help="Unweighed graph file (Default: graph.info)", + default="graph.info") + + +args = parser.parse_args() + +depth_dependence_factor_x = float(args.vertex_weight) +graph_filename = args.graph_filename +data_filename = args.data_filename + +file = MFDataset(data_filename) + +levels = file.variables['maxLevelCell'][:] + +minimum = np.amin(levels) + +ratio = np.divide(levels, minimum) +weights = np.ceil((np.float_power(ratio, depth_dependence_factor_x))) +weights = weights.astype(int) +file.close() + +filename = "graph.info_with_wgts_" + str(depth_dependence_factor_x) +fr = 
open(graph_filename, 'r') +fw = open(filename, 'w') + +counter = -1 + +for line in fr: + if counter == -1: + temp = line.split("\n", 1)[0] + # 010 indicates that the graph.info file is formatted to include the + # cell weights + fw.write("%s 010 \n" % temp) + else: + temp = line.split("\n", 1)[0] + fw.write("%d %s \n" % (weights[counter], temp)) + counter = counter + 1 + +fr.close() +fw.close() diff --git a/ocean/performance_testing/weights/performance_testing.py b/ocean/performance_testing/weights/performance_testing.py new file mode 100644 index 000000000..6aa840c9b --- /dev/null +++ b/ocean/performance_testing/weights/performance_testing.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python + +""" + Name: performance_testing.py + Author: Divya Jaganathan + Date: 6 July, 2018 + +This script is automatically called by call_to_performance_testing.py to run a batch job to get performance plots and data + +Access files required to run this script: + 1. namelist.ocean + 2. graph.info + 3. metis file (rename gpmetis to metis or vice-versa in this script when creating a soft link) + 4. ocean_model (executable file) + +NOTE: When running a large number of tasks (>10k), check the name of log.ocean.0000.out file generated - no. of zeros in the file name changes + +""" + + +import subprocess +import numpy as np +import re +import sys +import datetime +from time import strftime +import matplotlib +matplotlib.use('Agg') +import matplotlib.pyplot as plt +import os +import shlex + +# Setting OMP variables for NO multithreading + +os.environ['OMP_NUM_THREADS'] = '1' +os.environ['OMP_PLACES'] = 'threads' + +timenow = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + +# To obtain the run duration used in calculating SYPD + +time_fr = open("namelist.ocean", 'r') + +for line in time_fr: + m1 = re.search("config_run_duration", line) + if m1: + parts = line.split("=", 1)[1] + subparts = re.split(':|\'|_', parts) + timeparts = subparts[2:5] + dateparts = re.split('-', subparts[1]) + if len(dateparts) == 1: + simulated_time_in_sec = int( + timeparts[0]) * 60 * 60 + int(timeparts[1]) * 60 + int(timeparts[2]) + else: + simulated_time_in_sec = int(dateparts[2]) * 24 * 60 * 60 + int( + timeparts[0]) * 60 * 60 + int(timeparts[1]) * 60 + int(timeparts[2]) + +time_fr.close() + +# To store the details on number of cells (~ resolution) + +cells_fr = open("graph.info", 'r') +cells = str(cells_fr.readline().split(" ")[0]) +cells_fr.close() + +nprocs_max = int(sys.argv[1]) +nprocs_min = int(sys.argv[2]) +cpu_type = sys.argv[3] +cores_per_node = float(sys.argv[4]) +x = float(sys.argv[5]) + +# plane_size is used to define the plane_distribution flag in srun +plane_size = str(int(cores_per_node)) + +# Performance data evaluation begins here - + +niter = int(np.log2(nprocs_max)) - int(np.log2(nprocs_min)) + 1 +nsamples_per_procnum = 5 + +time = np.zeros(shape=(1, niter)) +procs = np.zeros(shape=(1, niter)) +SYPD = np.zeros(shape=(1, niter)) + +#i = nprocs_max +#j = niter + +writefilename = "data_" + cpu_type + "_" + \ + str(nprocs_max) + "_" + timenow + ".txt" +fw = open(writefilename, 'a+') +fw.write( + 'Time: %s \nMachine: %s\nNo. 
of Cells: %s\nRun duration: %s\nRun time in sec: %d\nFormat: #Procs|Sample Runs|Average|SYPD \n' % + (timenow, cpu_type, cells, timeparts, simulated_time_in_sec)) + + +while x >= 0: + + graph_call = "python generate_graph.info_with_wgts.py -d init.nc -g graph.info -x %s" % x + g_args = shlex.split(graph_call) + print "running", ''.join(g_args) + subprocess.check_call(g_args) + + foldername_wgt = "weight" + str(x) + subprocess.check_call(['mkdir', foldername_wgt]) + graph_filename = "graph.info_with_wgts_" + str(x) + i = nprocs_max + j = niter + + while i >= nprocs_min: + + local_N = int(np.ceil(i / cores_per_node)) + sample = nsamples_per_procnum + foldername = foldername_wgt + "/perf_p" + str(i) + "_gr_openmpi" + subprocess.check_call(['mkdir', '-p', foldername]) + fw.write('%s \t' % i) + sum = 0 + subprocess.check_call(['./metis', graph_filename, str(i)]) + print "metis" + str(i) + "completed" + graph_part_name = graph_filename + ".part." + str(i) + to_name = "graph.info.part." + str(i) + subprocess.check_call(['mv', graph_part_name, to_name]) + + while sample >= 1: + args = ['srun', + '-N', + str(local_N), + '-n', + str(i), + '--cpu_bind=verbose,core', + '--distribution=plane=%s' % plane_size, + './ocean_model'] + print "running", ''.join(args) + subprocess.check_call(args) + + # Search for time integration and write to a file + fr = open("log.ocean.0000.out", 'r') + for line in fr: + m = re.search("2 time integration", line) + if m: + numbers = line.split("integration", 1)[1] + first_number = numbers.split()[0] + fw.write('%s \t' % first_number) + sum = sum + float(first_number) + + fname = "log_p" + str(i) + "_s" + str(sample) + filepath = foldername + "/" + fname + sample = sample - 1 + subprocess.check_call(['mv', 'log.ocean.0000.out', filepath]) + + average = sum / nsamples_per_procnum + time[0][j - 1] = average + procs[0][j - 1] = i + SYPD[0][j - 1] = simulated_time_in_sec / (365 * average) + fw.write('%s \t %s\n' % (str(average), str(SYPD[0][j - 1]))) + i = i / 2 + j = j - 1 + subprocess.check_call(['mv', to_name, foldername]) + x = x - 0.5 + +# plotting .. + +subprocess.check_call(['mkdir', '-p', 'data_figures']) + +perfect = SYPD[0][0] / procs[0][0] * procs +plt.loglog(procs[0], SYPD[0], '-or', label=str(x)) +plt.loglog(procs[0], perfect[0], '--k', label='perfect scaling') +plt.title(r'MPAS-Ocean Performance Curve (Broadwell 36-cores No HT)') +plt.xlabel('Number of MPI ranks') +plt.ylabel('Simulated Years Per Day (SYPD)') +plt.legend(loc='upper left') +plt.grid() +plt.xlim((1, nprocs_max * 2)) +plt.tight_layout() +figurenamepath = "data_figures/fig_" + cpu_type + \ + str(nprocs_max) + "_" + timenow + ".png" +plt.savefig(figurenamepath) +subprocess.check_call(['mv', writefilename, 'data_figures']) + +fr.close() +fw.close() + +# End diff --git a/operators/vector_reconstruction.py b/operators/vector_reconstruction.py new file mode 100755 index 000000000..674b069fd --- /dev/null +++ b/operators/vector_reconstruction.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python + +""" +Extract Cartesian (X, Y, Z), zonal and meridional components of an MPAS vector +field, given the field on edge normals. + +This tool requires that the field 'coeffs_reconstruct' has been saved to a +NetCDF file. The simplest way to do this is to include the following stream +in a forward run: + + + + + + +and run the model for one time step. 
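+
+Such a stream might look roughly like the following (the attribute values
+here are only illustrative; consult the streams documentation of the MPAS
+core you are running for the exact form):
+
+    <stream name="vector_reconstruction"
+            type="output"
+            output_interval="0000-00-00_00:00:01"
+            filename_template="vector_reconstruction.nc">
+        <var name="coeffs_reconstruct"/>
+    </stream>
+
+Once coeffs_reconstruct has been written, a typical call to this script (the
+file and field names are placeholders) is:
+
+    ./vector_reconstruction.py -i output.nc -w vector_reconstruction.nc \
+        -v normalVelocity -o velocity_components.nc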
+ +""" +# Authors +# ------- +# Xylar Asay-Davis + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import xarray +import numpy +import netCDF4 +import argparse +import sys +from datetime import datetime +from dask.diagnostics import ProgressBar + + +def write_netcdf(ds, fileName, fillValues=netCDF4.default_fillvals): + encodingDict = {} + variableNames = list(ds.data_vars.keys()) + list(ds.coords.keys()) + for variableName in variableNames: + dtype = ds[variableName].dtype + for fillType in fillValues: + if dtype == numpy.dtype(fillType): + encodingDict[variableName] = \ + {'_FillValue': fillValues[fillType]} + break + + delayed_obj = ds.to_netcdf(fileName, encoding=encodingDict, compute=False) + + print('Writing {}'.format(fileName)) + with ProgressBar(): + delayed_obj.compute() + + +def reconstruct_variable(outVarName, variableOnEdges, dsMesh, + coeffs_reconstruct, dsOut, chunkSize=32768): + nCells = dsMesh.sizes['nCells'] + # nEdgesOnCell = dsMesh.nEdgesOnCell.values + edgesOnCell = dsMesh.edgesOnCell - 1 + + variableOnEdges.load() + edgesOnCell.load() + coeffs_reconstruct.load() + + dims = [] + sizes = [] + varIndices = {} + for dim in variableOnEdges.dims: + size = variableOnEdges.sizes[dim] + varIndices[dim] = numpy.arange(size) + if dim == 'nEdges': + dim = 'nCells' + size = nCells + varIndices['nEdges'] = edgesOnCell + dims.append(dim) + sizes.append(size) + + coeffs_reconstruct = coeffs_reconstruct.chunk({'nCells': chunkSize}) + + variable = variableOnEdges[varIndices].chunk({'nCells': chunkSize}) + print('Computing {} at edgesOnCell:'.format(outVarName)) + with ProgressBar(): + variable.compute() + + varCart = [] + + print('Computing Cartesian conponents:') + for index, component in enumerate(['X', 'Y', 'Z']): + var = (coeffs_reconstruct.isel(R3=index)*variable).sum( + dim='maxEdges').transpose(*dims) + outName = '{}{}'.format(outVarName, component) + print(outName) + with ProgressBar(): + var.compute() + dsOut[outName] = var + varCart.append(var) + + latCell = dsMesh.latCell + lonCell = dsMesh.lonCell + latCell.load() + lonCell.load() + + clat = numpy.cos(latCell) + slat = numpy.sin(latCell) + clon = numpy.cos(lonCell) + slon = numpy.sin(lonCell) + + print('Computing zonal and meridional components:') + + outName = '{}Zonal'.format(outVarName) + zonal = -varCart[0]*slon + varCart[1]*clon + print(outName) + with ProgressBar(): + zonal.compute() + dsOut[outName] = zonal + + outName = '{}Meridional'.format(outVarName) + merid = -(varCart[0]*clon + varCart[1]*slon)*slat + varCart[2]*clat + print(outName) + with ProgressBar(): + merid.compute() + dsOut[outName] = merid + + +def main(): + + # client = Client(n_workers=1, threads_per_worker=4, memory_limit='10GB') + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-m", "--meshFileName", dest="meshFileName", + type=str, required=False, + help="An MPAS file with mesh data (edgesOnCell, etc.)") + parser.add_argument("-w", "--weightsFileName", dest="weightsFileName", + type=str, required=False, + help="An MPAS file with coeffs_reconstruct ") + parser.add_argument("-i", "--inFileName", dest="inFileName", type=str, + required=True, + help="An MPAS file with one or more fields on edges " + "to be reconstructed at cell centers. 
Used for " + "mesh data and/or weights if a separate files " + "are not provided.") + parser.add_argument("-v", "--variables", dest="variables", type=str, + required=True, + help="A comma-separated list of variables on edges to " + "reconstruct") + parser.add_argument("--outVariables", dest="outVariables", type=str, + required=False, + help="A comma-separated list of prefixes for output " + "variable names") + parser.add_argument("-o", "--outFileName", dest="outFileName", type=str, + required=True, + help="An output MPAS file with the reconstructed " + "X, Y, Z, zonal and meridional fields") + args = parser.parse_args() + + if args.meshFileName: + meshFileName = args.meshFileName + else: + meshFileName = args.inFileName + + if args.weightsFileName: + weightsFileName = args.weightsFileName + else: + weightsFileName = args.inFileName + + variables = args.variables.split(',') + if args.outVariables: + outVariables = args.outVariables.split(',') + else: + outVariables = variables + + dsIn = xarray.open_dataset(args.inFileName, mask_and_scale=False) + dsMesh = xarray.open_dataset(meshFileName) + dsWeights = xarray.open_dataset(weightsFileName) + coeffs_reconstruct = dsWeights.coeffs_reconstruct + dsOut = xarray.Dataset() + + for inVarName, outVarName in zip(variables, outVariables): + reconstruct_variable(outVarName, dsIn[inVarName], dsMesh, + coeffs_reconstruct, dsOut) + + for attrName in dsIn.attrs: + dsOut.attrs[attrName] = dsIn.attrs[attrName] + + time = datetime.now().strftime('%c') + + history = '{}: {}'.format(time, ' '.join(sys.argv)) + + if 'history' in dsOut.attrs: + dsOut.attrs['history'] = '{}\n{}'.format(history, + dsOut.attrs['history']) + else: + dsOut.attrs['history'] = history + + write_netcdf(dsOut, args.outFileName) + + +if __name__ == '__main__': + main() diff --git a/python_scripts/decomposition_fields/README b/output_processing/decomposition_fields/README similarity index 100% rename from python_scripts/decomposition_fields/README rename to output_processing/decomposition_fields/README diff --git a/python_scripts/decomposition_fields/decomposition_fields.py b/output_processing/decomposition_fields/decomposition_fields.py similarity index 100% rename from python_scripts/decomposition_fields/decomposition_fields.py rename to output_processing/decomposition_fields/decomposition_fields.py diff --git a/python_scripts/grid_quality/README b/output_processing/grid_quality/README similarity index 100% rename from python_scripts/grid_quality/README rename to output_processing/grid_quality/README diff --git a/python_scripts/grid_quality/grid_quality.py b/output_processing/grid_quality/grid_quality.py similarity index 100% rename from python_scripts/grid_quality/grid_quality.py rename to output_processing/grid_quality/grid_quality.py diff --git a/python_scripts/namelist_generation/.gitignore b/python_scripts/namelist_generation/.gitignore deleted file mode 100644 index 14cb08aa5..000000000 --- a/python_scripts/namelist_generation/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -# Ignore LaTeX files and generated namelist -namelist.input.generated -*.tex diff --git a/python_scripts/namelist_generation/README b/python_scripts/namelist_generation/README deleted file mode 100644 index b25bc069a..000000000 --- a/python_scripts/namelist_generation/README +++ /dev/null @@ -1,66 +0,0 @@ -Authors: Doug Jacobsen, Xylar Asay-Davis -Date: 04/19/13 -Last Modified: 10/26/17 - -This script parses a MPAS Registry.xml file to generates documentation for a -users or developers guide. 
- -Typical usage is as follows: - - # set the core, one of ocean, landice, cice, etc. - export CORE= - # Set your repo directories: - export MPAS_REPO=~/repos/MPAS - export MPAS_TOOLS_REPO=~/repos/MPAS-Tools - export MPAS_DOCUMENTS_REPO=~/repos/MPAS-Documents - cd $MPAS_REPO - # Compile MPAS so you have a src/core_ocean/Registry_processed.xml file. - # Change the compiler as needed. - make CORE=$CORE gfortran - cd $MPAS_DOCUMENTS_REPO/users_guide/$CORE - # clean up blank lines at the top of the XML file - sed '/./,$!d' $MPAS_REPO/src/core_${CORE}/Registry_processed.xml > \ - Registry_cleaned.xml - $MPAS_TOOLS_REPO/python_scripts/namelist_generation/parse_xml_registry.py \ - -f Registry_cleaned.xml -d section_descriptions \ - -p ${CORE}/section_descriptions - cd .. - make clean CORE=$CORE - make CORE=$CORE - -The -f flag points to the processed registry file (typically with a full path). - -The -d flag points to the local or full path to .tex files that containing -section descriptions for providing additional information in the output latex -documentation. - -Section descriptions are required to be named whatever the section is. For -example, in a namelist, there might be a namelist record named -"&time_management". The script searches the directory listed with the -d -flag for a latex file named time_management.tex, and adds an input line to -the output latex documentation to include this file. - -The -p flag specifies the relative path inside the latex documentation where -the file should be input from. As an example, one might -run it as follows to generate the ocean core's documentation:: - - ./parse_xml_registry.xml -f mpas_root/src/core_ocean/Registry.xml \ - -d mpas_doc_root/users_guide/ocean/section_descriptions \ - -p ocean/section_descriptions - -On output, several files are created which are listed below. - namelist.input.generated - A default namelist.input file for the core that - owns the Registry.xml file. - dimensions.tex - A tabulated description of the dimensions for the core. - namelist_table_documentation.tex - A tabulated description of the namelist - options for the core. - namelist_section_documentation.tex - A more detailed section format - description of the namelist options - for the core. - variable_table_documentation.tex - A tabulated description of the variables - in the core. - variable_section_documentation.tex - A more detailed section formate - description of the variable in the - core. - define_version.tex - A simple file which can be included to define \version - inside the users guide. 
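As a concrete illustration of the section-description mechanism described above
(reusing the README's own "&time_management" example together with the example
-p value "ocean/section_descriptions"; the actual path follows whatever is
passed with the -p flag), the script would add an input line of the form

    \input{ocean/section_descriptions/time_management.tex}  % path follows the -p flag

to the generated latex documentation.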
diff --git a/python_scripts/namelist_generation/cmunrm.otf b/python_scripts/namelist_generation/cmunrm.otf deleted file mode 100644 index b449df04c..000000000 Binary files a/python_scripts/namelist_generation/cmunrm.otf and /dev/null differ diff --git a/python_scripts/namelist_generation/parse_ocean_xml_registry.py b/python_scripts/namelist_generation/parse_ocean_xml_registry.py deleted file mode 100755 index ca1df519e..000000000 --- a/python_scripts/namelist_generation/parse_ocean_xml_registry.py +++ /dev/null @@ -1,1039 +0,0 @@ -#!/usr/bin/env python -import collections -from optparse import OptionParser -import xml.etree.ElementTree as ET - -parser = OptionParser() -parser.add_option("-f", "--forward_registry", dest="forward_registry_path", help="Path to Mode Forward's Preprocessed Registry file", metavar="FILE") -parser.add_option("-a", "--analysis_registry", dest="analysis_registry_path", help="Path to Mode Analysis' Preprocessed Registry file", metavar="FILE") -parser.add_option("-d", "--tex_dir", dest="latex_dir", help="Path to directory with latex addition files.", metavar="DIR") -parser.add_option("-p", "--tex_path", dest="latex_path", help="Path to latex input files that will be written to generated latex.", metavar="PATH") - -options, args = parser.parse_args() - -def break_string(string):#{{{ - i = 0.0 - idx = -1 - - size = 0 - - big_size = 1.8 - small_size = 1.2 - really_small_size = 0.2 - - big_count = 0 - small_count = 0 - really_small_count = 0 - - for c in string: - idx = idx + 1 - if c.isupper(): - big_count = big_count + 1 - size = size + big_size - else: - if c == "l" or c == "i": - really_small_count = really_small_count + 1 - size = size + really_small_size - else: - small_count = small_count + 1 - size = size + small_size - - if size >= 33.5: - return idx - - return -1 - #}}} - -def write_dimension_table(latex, registry, mode):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - dimension_table_header = '{\\bf Name} & {\\bf Units} & {\\bf Description}' - - latex.write('\section{Dimensions}\n') - latex.write('\label{sec:%s_dimensions}\n'%(mode)) - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{1.2in} || p{1.0in} | p{4.0in} |}\n') - latex.write(' \hline \n') - latex.write(' %s \\endfirsthead\n'%dimension_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n'%dimension_table_header) - latex.write(' \hline \n') - latex.write(' \hline \n') - for dims in registry.iter("dims"): - for dim in dims.iter("dim"): - dim_name = dim.attrib['name'] - try: - dim_description = dim.attrib['description'] - except: - dim_description = latex_missing_string - - try: - dim_units = dim.attrib['units'] - if dim_units == "": - dim_units = latex_missing_string - else: - dim_units = "$%s$"%dim_units.replace(' ', '$ $') - except: - dim_units = latex_missing_string - - if dim_description == "": - dim_description = latex_missing_string - else: - equations = dim_description.find('$') - if equations != -1: - dim_desc_split = dim_description.split("$") - - if dim_description.replace('_','')[0] == "$": - replace = False - dim_description = "$" - else: - replace = True - dim_description = "" - - for part in dim_desc_split: - if replace: - dim_description = "%s %s"%(dim_description, part.replace('_','\_')) - replace = False - else: - dim_description = "%s $%s$"%(dim_description, part) - replace = True - else: - dim_description = "%s"%dim_description.replace('_','\_') - - latex.write(' %s & %s & %s \\\\ 
\n'%(dim_name.replace('_','\_'), dim_units.replace('_','\_'), dim_description.replace('_','\_'))) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') -#}}} - -def write_namelist_table(latex, registry, mode):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - namelist_table_header = '{\\bf Name} & {\\bf Description}' - - latex.write('\section[Namelist options]{\hyperref[chap:namelist_sections]{Namelist options}}\n') - latex.write('\label{sec:%s_namelist_tables}\n'%(mode)) - latex.write('Embedded links point to more detailed namelist information in the appendix.\n') - for nml_rec in registry.iter("nml_record"): - rec_name = nml_rec.attrib['name'] - #latex.write('\subsection[%s]{\hyperref[sec:nm_sec_%s]{%s}}\n'%(rec_name.replace('_','\_'), rec_name, rec_name.replace('_','\_'))) - latex.write('\subsection[%s]{%s}\n'%(rec_name.replace('_','\_'), rec_name.replace('_','\_'))) - latex.write('\label{subsec:%s_nm_tab_%s}\n'%(mode, rec_name)) - - # Add input line if file exists. - try: - junk_file = open('%s/%s.tex'%(options.latex_dir,rec_name), 'r') - latex.write('\input{%s/%s.tex}\n'%(options.latex_path, rec_name)) - junk_file.close() - except: - latex.write('') - - latex.write('\\vspace{0.5in}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} || p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' %s \\endfirsthead\n'%namelist_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n'%namelist_table_header) - latex.write(' \hline\n') - latex.write(' \hline\n') - - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib['name'] - - try: - opt_description = nml_opt.attrib['description'] - except: - opt_description = latex_missing_string - - if opt_description == "": - opt_description = latex_missing_string - else: - equations = opt_description.find('$') - if equations != -1: - opt_desc_split = opt_description.split("$") - - if opt_description.replace(' ','')[0] == "$": - replace = False - opt_description = "$" - else: - replace = True - opt_description = "" - - for part in opt_desc_split: - if replace: - opt_description = "%s %s"%(opt_description, part.replace('_','\_')) - replace = False - else: - opt_description = "%s $%s$"%(opt_description, part) - replace = True - else: - opt_description = "%s"%opt_description.replace('_','\_') - - idx = break_string(opt_name) - if idx >= 29: - latex.write(' \hyperref[sec:nm_sec_%s]{%s-}\hyperref[sec:nm_sec_%s]{%s}& %s \\\\\n'%(opt_name, opt_name[0:idx].replace('_','\_'), opt_name, opt_name[idx:].replace('_','\_'), opt_description)) - else: - latex.write(' \hyperref[sec:nm_sec_%s]{%s} & %s \\\\\n'%(opt_name, opt_name.replace('_','\_'), opt_description)) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') -#}}} - -def write_variable_table(latex, registry, mode):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - variable_table_header = '{\\bf Name} & {\\bf Description}' - - latex.write('\section[Variable definitions]{\hyperref[chap:variable_sections]{Variable definitions}}\n') - latex.write('\label{sec:%s_variable_tables}\n'%mode) - latex.write('Embedded links point to more detailed variable information in the appendix.\n') - for var_struct in registry.iter("var_struct"): - struct_name = var_struct.attrib['name'] - 
latex.write('\subsection[%s]{\hyperref[sec:var_sec_%s]{%s}}\n'%(struct_name.replace('_','\_'),struct_name,struct_name.replace('_','\_'))) - latex.write('\label{subsec:%s_var_tab_%s}\n'%(mode, struct_name)) - - try: - junk_file = open('%s/%s_struct.tex'%(options.latex_dir,struct_name), 'r') - latex.write('\input{%s/%s_struct.tex}\n'%(options.latex_path, struct_name)) - junk_file.close() - except: - latex.write('') - - latex.write('\\vspace{0.5in}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} | p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' %s \\endfirsthead\n'%variable_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n'%variable_table_header) - latex.write(' \hline\n') - - for node in var_struct.getchildren(): - if node.tag == 'var_array': - for var in node.iter("var"): - var_name = var.attrib['name'] - var_description = var.attrib['description'] - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') - - idx = break_string(var_name) - if idx > -1: - latex.write(' \hyperref[subsec:var_sec_%s_%s]{%s-}\hyperref[subsec:var_sec_%s_%s]{%s} & %s \\\\\n'%(struct_name, var_name, var_name[0:idx].replace('_','\_'), struct_name, var_name, var_name[idx:].replace('_','\_'), var_description)) - else: - latex.write(' \hyperref[subsec:var_sec_%s_%s]{%s} & %s \\\\\n'%(struct_name, var_name, var_name.replace('_','\_'), var_description)) - latex.write(' \hline\n') - elif node.tag == 'var': - var = node - var_name = var.attrib['name'] - var_description = var.attrib['description'] - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') - - idx = break_string(var_name) - if idx > -1: - latex.write(' \hyperref[subsec:var_sec_%s_%s]{%s-}\hyperref[subsec:var_sec_%s_%s]{%s }& %s \\\\\n'%(struct_name, var_name, var_name[0:idx].replace('_','\_'), struct_name, var_name, var_name[idx:].replace('_','\_'), var_description)) - else: - latex.write(' \hyperref[subsec:var_sec_%s_%s]{%s} & %s \\\\\n'%(struct_name, var_name, var_name.replace('_','\_'), var_description)) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') -#}}} - -def write_namelist_sections(latex, sorted_opts, forward_registry, analysis_registry):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - - #latex.write('\chapter[Namelist 
options]{\hyperref[chap:namelist_tables]{Namelist options}}\n') - latex.write('\chapter[Namelist options]{Namelist options}\n') - latex.write('\label{chap:namelist_sections}\n') -# latex.write('Embedded links point to information in chapter \\ref{chap:namelist_tables}\n') - - for opt in sorted_opts: - found = False - in_forward = False - in_analysis = False - forward_rec_name = "" - analysis_rec_name = "" - - # Search forward registry - for nml_rec in forward_registry.iter("nml_record"):#{{{ - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib["name"] - - if(in_forward == False and opt_name == opt): - in_forward = True - forward_rec_name = nml_rec.attrib["name"] - if(not found): - found = True - opt_type = nml_opt.attrib["type"] - opt_value = nml_opt.attrib["default_value"] - - try: - opt_possible_values = nml_opt.attrib["possible_values"] - except: - opt_possible_values = latex_missing_string - - try: - opt_units = nml_opt.attrib["units"] - if opt_units == "": - opt_units = latex_missing_string - else: - opt_units = "$%s$"%opt_units.replace(' ', '$ $') - except: - opt_units = latex_missing_string - - try: - opt_description = nml_opt.attrib["description"] - except: - opt_description = latex_missing_string - - if opt_possible_values == "": - opt_possible_values = latex_missing_string - - - if opt_description == "": - opt_description = latex_missing_string.replace('_','\_') - else: - equations = opt_description.find('$') - if equations != -1: - opt_desc_split = opt_description.split("$") - - if opt_description.replace('_','')[0] == "$": - replace = False - opt_description = "$" - else: - replace = True - opt_description = "" - - for part in opt_desc_split: - if replace: - opt_description = "%s %s"%(opt_description, part.replace('_','\_')) - replace = False - else: - opt_description = "%s $%s$"%(opt_description, part) - replace = True - else: - opt_description = "%s"%opt_description.replace('_','\_') -#}}} - - # Search analysis registry if not found yet - for nml_rec in analysis_registry.iter("nml_record"):#{{{ - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib["name"] - - if(in_analysis == False and opt_name == opt): - in_analysis = True - analysis_rec_name = nml_rec.attrib["name"] - if(not found): - found = True - opt_type = nml_opt.attrib["type"] - opt_value = nml_opt.attrib["default_value"] - - try: - opt_possible_values = nml_opt.attrib["possible_values"] - except: - opt_possible_values = latex_missing_string - - try: - opt_units = nml_opt.attrib["units"] - if opt_units == "": - opt_units = latex_missing_string - else: - opt_units = "$%s$"%opt_units.replace(' ', '$ $') - except: - opt_units = latex_missing_string - - try: - opt_description = nml_opt.attrib["description"] - except: - opt_description = latex_missing_string - - if opt_possible_values == "": - opt_possible_values = latex_missing_string - - - if opt_description == "": - opt_description = latex_missing_string.replace('_','\_') - else: - equations = opt_description.find('$') - if equations != -1: - opt_desc_split = opt_description.split("$") - - if opt_description.replace('_','')[0] == "$": - replace = False - opt_description = "$" - else: - replace = True - opt_description = "" - - for part in opt_desc_split: - if replace: - opt_description = "%s %s"%(opt_description, part.replace('_','\_')) - replace = False - else: - opt_description = "%s $%s$"%(opt_description, part) - replace = True - else: - opt_description = "%s"%opt_description.replace('_','\_') -#}}} - - # If option has been 
found, write it out as a section. - if(found): - opt_name = opt - #latex.write('\section[%s]{\hyperref[sec:nm_tab_%s]{%s}}\n'%(opt_name.replace('_','\_'),rec_name,opt_name.replace('_','\_'))) - latex.write('\section[%s]{%s}\n'%(opt_name.replace('_','\_'),opt_name.replace('_','\_'))) - latex.write('\label{sec:nm_sec_%s}\n'%opt_name) - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} || p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' In build modes: & ') - if(in_forward): - latex.write('\hyperref[subsec:forward_nm_tab_%s]{forward} '%(forward_rec_name)) - if(in_analysis): - latex.write('\hyperref[subsec:analysis_nm_tab_%s]{analysis} '%(analysis_rec_name)) - latex.write('\\\\\n') - - latex.write(' \hline\n') - latex.write(' Type: & %s \\\\\n'%opt_type.replace('_','\_')) - latex.write(' \hline\n') - latex.write(' Units: & %s \\\\\n'%opt_units.replace('_','\_')) - latex.write(' \hline\n') - latex.write(' Default Value: & %s \\\\\n'%opt_value.replace('_','\_')) - latex.write(' \hline\n') - latex.write(' Possible Values: & %s \\\\\n'%opt_possible_values.replace('_','\_')) - latex.write(' \hline\n') - latex.write(' \caption{%s: %s}\n'%(opt_name.replace('_','\_'), opt_description)) - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') -#}}} - -def write_variable_sections(latex, sorted_structs, forward_registry, analysis_registry):#{{{ - latex_missing_string = '{\\bf \color{red} MISSING}' - - #latex.write('\chapter[Variable definitions]{\hyperref[chap:variable_tables]{Variable definitions}}\n') - latex.write('\chapter[Variable definitions]{Variable definitions}\n') - latex.write('\label{chap:variable_sections}\n') -# latex.write('Embedded links point to information in chapter \\ref{chap:variable_tables}\n') - - for struct in sorted_structs: - ##latex.write('\section[%s]{\hyperref[sec:var_tab_%s]{%s}}\n'%(struct.replace('_','\_'),struct, struct.replace('_','\_'))) - latex.write('\section[%s]{%s}\n'%(struct.replace('_','\_'), struct.replace('_','\_'))) - latex.write('\label{sec:var_sec_%s}\n'%struct) - - unique_vars = []; - # Determine all variables in the current var struct from the forward mode - for var_struct in forward_registry.iter("var_struct"):#{{{ - struct_name = var_struct.attrib["name"] - if(struct_name == struct): - for var_arr in var_struct.iter("var_array"): - for var in var_arr.iter("var"): - name = var.attrib["name"] - if(unique_vars.count(name) == 0): - unique_vars.append(name) - for var in var_struct.iter("var"): - name = var.attrib["name"] - if(unique_vars.count(name) == 0): - unique_vars.append(name) -#}}} - - # Determine all variables in the current var struct from the analysis mode - for var_struct in analysis_registry.iter("var_struct"):#{{{ - struct_name = var_struct.attrib["name"] - if(struct_name == struct): - for var_arr in var_struct.iter("var_array"): - for var in var_arr.iter("var"): - name = var.attrib["name"] - if(unique_vars.count(name) == 0): - unique_vars.append(name) - for var in var_struct.iter("var"): - name = var.attrib["name"] - if(unique_vars.count(name) == 0): - unique_vars.append(name) - #}}} - - sorted_vars = sorted(unique_vars) - del unique_vars - - for var_name in sorted_vars: - found = False - in_forward = False - in_analysis = False - in_var_array = False - - # Try to extract var from forward mode - for var_struct in forward_registry.iter("var_struct"): #{{{ - struct_name = var_struct.attrib["name"] - if(struct_name == struct): - for var_arr in var_struct.iter("var_array"): - for var in 
var_arr.iter("var"): - name = var.attrib["name"] - if(name == var_name): - in_forward = True - - if(not found): - found = True - in_var_array = True - - struct_time_levs = var_struct.attrib['time_levs'] - var_arr_name = var_arr.attrib["name"] - var_type = var_arr.attrib["type"] - var_dims = var_arr.attrib["dimensions"] - try: - var_time_levels = var_arr.attrib["time_levs"] - except: - var_time_levels = var_struct.attrib["time_levs"] - - # Extract var persistence#{{{ - try: - var_persistence = var_arr.attrib['persistence'] - except: - var_persistence = 'persistent' -#}}} - - # Extract var units#{{{ - try: - var_units = var.attrib['units'] - if var_units == "": - var_units = latex_missing_string - else: - var_units = "$%s$"%var_units.replace(' ', '$ $') - except: - var_units = latex_missing_string -#}}} - - var_arr_group = var.attrib["array_group"] - - # Extract name in code, and build var_path#{{{ - try: - var_name_in_code = var.attrib['name_in_code'] - except: - var_name_in_code = var_name - - var_path = "%s"%(var_arr_name) -#}}} - - # Extract var description#{{{ - try: - var_description = var.attrib['description'] - except: - var_description = latex_missing_string.replace('_','\_') - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') -#}}} - - for var in var_struct.iter("var"): - name = var.attrib["name"] - if(name == var_name): - in_forward = True - - if(not found): - found = True - in_var_array = False - struct_time_levs = var_struct.attrib['time_levs'] - var_type = var.attrib["type"] - var_dims = var.attrib["dimensions"] - try: - var_time_levels = var.attrib["time_levs"] - except: - var_time_levels = var_struct.attrib["time_levs"] - - # Extract var persistence#{{{ - try: - var_persistence = var_arr.attrib['persistence'] - except: - var_persistence = 'persistent' -#}}} - - # Extract var units#{{{ - try: - var_units = var.attrib['units'] - if var_units == "": - var_units = latex_missing_string - else: - var_units = "$%s$"%var_units.replace(' ', '$ $') - except: - var_units = latex_missing_string -#}}} - - # Extract name in code, and build var_path#{{{ - try: - var_name_in_code = var.attrib['name_in_code'] - except: - var_name_in_code = var_name - - var_path = "%s"%(var_name) -#}}} - - # Extract var description#{{{ - try: - var_description = var.attrib['description'] - except: - var_description = latex_missing_string.replace('_','\_') - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = 
"%s"%var_description.replace('_','\_') -#}}} -#}}} - - # Try to extract var from analysis mode - for var_struct in analysis_registry.iter("var_struct"): #{{{ - struct_name = var_struct.attrib["name"] - if(struct_name == struct): - for var_arr in var_struct.iter("var_array"): - for var in var_arr.iter("var"): - name = var.attrib["name"] - if(name == var_name): - in_analysis = True - - if(not found): - found = True - in_var_array = True - - struct_time_levs = var_struct.attrib['time_levs'] - var_arr_name = var_arr.attrib["name"] - var_type = var_arr.attrib["type"] - var_dims = var_arr.attrib["dimensions"] - try: - var_time_levels = var_arr.attrib["time_levs"] - except: - var_time_levels = var_struct.attrib["time_levs"] - - # Extract var persistence#{{{ - try: - var_persistence = var_arr.attrib['persistence'] - except: - var_persistence = 'persistent' -#}}} - - # Extract var units#{{{ - try: - var_units = var.attrib['units'] - if var_units == "": - var_units = latex_missing_string - else: - var_units = "$%s$"%var_units.replace(' ', '$ $') - except: - var_units = latex_missing_string -#}}} - - var_arr_group = var.attrib["array_group"] - - # Extract name in code, and build var_path#{{{ - try: - var_name_in_code = var.attrib['name_in_code'] - except: - var_name_in_code = var_name - - if int(struct_time_levs) > 1: - var_index = "domain %% blocklist %% %s %% index_%s"%(struct_name, var_name_in_code.replace('_','\_')) - var_path = "%s"%(var_arr_name) - else: - var_index = "domain %% blocklist %% %s %% index_%s"%(struct_name, var_name_in_code.replace('_','\_')) - var_path = "%s"%(var_arr_name) -#}}} - - # Extract var description#{{{ - try: - var_description = var.attrib['description'] - except: - var_description = latex_missing_string.replace('_','\_') - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') -#}}} - - for var in var_struct.iter("var"): - name = var.attrib["name"] - if(name == var_name): - in_analysis = True - - if(not found): - found = True - in_var_array = False - struct_time_levs = var_struct.attrib['time_levs'] - var_type = var.attrib["type"] - var_dims = var.attrib["dimensions"] - try: - var_time_levels = var.attrib["time_levs"] - except: - var_time_levels = var_struct.attrib["time_levs"] - - # Extract var persistence#{{{ - try: - var_persistence = var_arr.attrib['persistence'] - except: - var_persistence = 'persistent' -#}}} - - # Extract var units#{{{ - try: - var_units = var.attrib['units'] - if var_units == "": - var_units = latex_missing_string - else: - var_units = "$%s$"%var_units.replace(' ', '$ $') - except: - var_units = latex_missing_string -#}}} - - # Extract name in code, and build var_path#{{{ - try: - var_name_in_code = var.attrib['name_in_code'] - except: - var_name_in_code = var_name - - if int(struct_time_levs) > 1: - var_path = "domain %% blocklist %% %s %% time_levs(:) %% %s %% %s"%(struct_name, struct_name, var_name) - else: - var_path = "domain %% blocklist %% %s %% %s"%(struct_name, var_name) -#}}} 
- - # Extract var description#{{{ - try: - var_description = var.attrib['description'] - except: - var_description = latex_missing_string.replace('_','\_') - - if var_description == "": - var_description = latex_missing_string.replace('_','\_') - else: - equations = var_description.find('$') - if equations != -1: - var_desc_split = var_description.split("$") - - if var_description.replace('_','')[0] == "$": - replace = False - var_description = "$" - else: - replace = True - var_description = "" - - for part in var_desc_split: - if replace: - var_description = "%s %s"%(var_description, part.replace('_','\_')) - replace = False - else: - var_description = "%s $%s$"%(var_description, part) - replace = True - else: - var_description = "%s"%var_description.replace('_','\_') -#}}} -#}}} - - # Build stream list from forward mode - forward_streams = ""#{{{ - if(in_forward): - for streams in forward_registry.iter("streams"): - for stream in streams.iter("stream"): - for var in stream.iter("var"): - name = var.attrib["name"] - if(name == var_name): - forward_streams = "%s %s"%(forward_streams, stream.attrib["name"]) - #}}} - - # Build stream list from analysis mode - analysis_streams = ""#{{{ - if(in_analysis): - for streams in analysis_registry.iter("streams"): - for stream in streams.iter("stream"): - for var in stream.iter("var"): - name = var.attrib["name"] - if(name == var_name): - analysis_streams = "%s %s"%(analysis_streams, stream.attrib["name"]) - #}}} - - if(found): - struct_name = struct - #latex.write('\subsection[%s]{\hyperref[sec:var_tab_%s]{%s}}\n'%(var_name.replace('_','\_'),struct_name, var_name.replace('_','\_'))) - latex.write('\subsection[%s]{%s}\n'%(var_name.replace('_','\_'), var_name.replace('_','\_'))) - latex.write('\label{subsec:var_sec_%s_%s}\n'%(struct_name,var_name)) - # Tabular Format: - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} | p{4.0in} |}\n') - latex.write(' \hline \n') - latex.write(' In build modes: & ') - if(in_forward): - latex.write('\hyperref[subsec:forward_var_tab_%s]{forward} '%(struct)) - if(in_analysis): - latex.write('\hyperref[subsec:analysis_var_tab_%s]{analysis} '%(struct)) - latex.write('\\\\\n') - latex.write(' \hline \n') - latex.write(' Type: & %s \\\\\n'%var_type) - latex.write(' \hline \n') - latex.write(' Units: & %s \\\\\n'%var_units) - latex.write(' \hline \n') - latex.write(' Dimension: & %s \\\\\n'%var_dims) - latex.write(' \hline \n') - latex.write(' Persistence: & %s \\\\\n'%var_persistence) - latex.write(' \hline \n') - latex.write(' Number of time levels: & %s \\\\\n'%var_time_levels) - latex.write(' \hline \n') - - if(in_var_array): - latex.write(" Index in `%s' Array: & `index\_%s' in `%s' pool \\\\\n"%(var_path.replace('_','\_'), var_name.replace('_', '\_'), struct_name.replace('_','\_'))) - latex.write(' \hline \n') - pool_path="`%s' in `%s' pool"%(var_path, struct_name) - latex.write(' Pool path: & %s \\\\\n'%pool_path.replace('_', '\_')) - latex.write(' \hline \n') - if(in_var_array): - latex.write(' Array Group: & %s \\\\\n'%var_arr_group.replace('_','\_')) - latex.write(' \hline \n') - latex.write(' \caption{%s: %s}\n'%(var_name.replace('_','\_'),var_description)) - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - -#}}} - -if not options.forward_registry_path: - parser.error("The forward mode's Registry file is required") - -if not options.analysis_registry_path: - parser.error("The analysis mode's Registry file is required") - -if not options.latex_dir: - print 'Directory 
with group latex files is missing. Skipping addition of latex files.' - extra_latex = False -else: - if not options.latex_path: - parser.error('Need latex path with latex directory.') - extra_latex = True - -latex_missing_string = '{\\bf \color{red} MISSING}' -dimension_table_header = '{\\bf Name} & {\\bf Units} & {\\bf Description}' -variable_table_header = '{\\bf Name} & {\\bf Description}' -namelist_table_header = '{\\bf Name} & {\\bf Description}' - -forward_registry_path = options.forward_registry_path -analysis_registry_path = options.analysis_registry_path - -try: - forward_registry_tree = ET.parse(forward_registry_path) -except: - parser.error('%s does not exist or is not parsable. Exiting.'%forward_registry_path) - -try: - analysis_registry_tree = ET.parse(analysis_registry_path) -except: - parser.error('%s does not exist or is not parsable. Exiting.'%analysis_registry_path) - -forward_registry = forward_registry_tree.getroot() -analysis_registry = analysis_registry_tree.getroot() - -# Build dimension lists -forward_dims = []; -analysis_dims = []; -unique_dims = []; - -for dims in forward_registry.iter("dims"): - for dim in dims.iter("dim"): - dim_name = dim.attrib["name"] - forward_dims.append(dim_name) - if(unique_dims.count(dim_name) == 0): - unique_dims.append(dim_name) - -for dims in analysis_registry.iter("dims"): - for dim in dims.iter("dim"): - dim_name = dim.attrib["name"] - analysis_dims.append(dim_name) - if(unique_dims.count(dim_name) == 0): - unique_dims.append(dim_name) - -sorted_dims = sorted(unique_dims) -del unique_dims - -# Build structure lists -forward_structs = []; -analysis_structs = []; -unique_structs = []; - -for struct in forward_registry.iter("var_struct"): - struct_name = struct.attrib["name"] - forward_structs.append(struct_name) - if(unique_structs.count(struct_name) == 0): - unique_structs.append(struct_name) - -for struct in analysis_registry.iter("var_struct"): - struct_name = struct.attrib["name"] - analysis_structs.append(struct_name) - if(unique_structs.count(struct_name) == 0): - unique_structs.append(struct_name) - -sorted_structs = sorted(unique_structs) -del unique_structs - - -# Build Namelist lists -forward_opts = []; -analysis_opts = []; -unique_opts = []; - -for nml_rec in forward_registry.iter("nml_record"): - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib["name"] - forward_opts.append(opt_name) - if(unique_opts.count(opt_name) == 0): - unique_opts.append(opt_name) - -for nml_rec in analysis_registry.iter("nml_record"): - for nml_opt in nml_rec.iter("nml_option"): - opt_name = nml_opt.attrib["name"] - analysis_opts.append(opt_name) - if(unique_opts.count(opt_name) == 0): - unique_opts.append(opt_name) - -sorted_opts = sorted(unique_opts) -del unique_opts - -# Write file that defines version string for model. -latex = open('define_version.tex', 'w+') -try: - version_string = forward_registry.attrib['version'] -except: - version_string = '{\\bf MISSING}' -latex.write('\\newcommand{\\version}{%s}\n'%version_string) -latex.close() - -# Write file to include for forward mode -# It should have sections for dimensions, namelist options/records, and -# variable and their structures. 
-latex = open('mode_forward_sections.tex', 'w+') -mode = 'forward' -write_dimension_table(latex, forward_registry, mode) -write_namelist_table(latex, forward_registry, mode) -write_variable_table(latex, forward_registry, mode) -latex.close() - -# Write file to include for analysis mode -# It should have sections for dimensions, namelist options/records, and -# variable and their structures. -latex = open('mode_analysis_sections.tex', 'w+') -mode = 'analysis' -write_dimension_table(latex, analysis_registry, mode) -write_namelist_table(latex, analysis_registry, mode) -write_variable_table(latex, analysis_registry, mode) -latex.close() - - -latex = open('namelist_sections.tex', 'w+') -write_namelist_sections(latex, sorted_opts, forward_registry, analysis_registry) -latex.close() - -latex = open('variable_sections.tex', 'w+') -write_variable_sections(latex, sorted_structs, forward_registry, analysis_registry) -latex.close() - - diff --git a/python_scripts/namelist_generation/parse_xml_registry.py b/python_scripts/namelist_generation/parse_xml_registry.py deleted file mode 100755 index 648abd4bc..000000000 --- a/python_scripts/namelist_generation/parse_xml_registry.py +++ /dev/null @@ -1,609 +0,0 @@ -#!/usr/bin/env python - -""" -This script parses a MPAS Registry.xml file to generates documentation for a -users or developers guide. - -Typical usage is as follows:: - - # set the core, one of ocean, landice, cice, etc. - export CORE= - # Set your repo directories: - export MPAS_REPO=~/repos/MPAS - export MPAS_TOOLS_REPO=~/repos/MPAS-Tools - export MPAS_DOCUMENTS_REPO=~/repos/MPAS-Documents - cd $MPAS_REPO - # Compile MPAS so you have a src/core_ocean/Registry_processed.xml file. - # Change the compiler as needed. - make CORE=$CORE gfortran - cd $MPAS_DOCUMENTS_REPO/users_guide/$CORE - # clean up blank lines at the top of the XML file - sed '/./,$!d' $MPAS_REPO/src/core_${CORE}/Registry_processed.xml > \ - Registry_cleaned.xml - $MPAS_TOOLS_REPO/python_scripts/namelist_generation/parse_xml_registry.py \ - -f Registry_cleaned.xml -d section_descriptions \ - -p ${CORE}/section_descriptions - cd .. - make clean CORE=$CORE - make CORE=$CORE - -The -f flag points to the processed registry file (typically with a full path). - -The -d flag points to the local or full path to .tex files that containing -section descriptions for providing additional information in the output latex -documentation. - -Section descriptions are required to be named whatever the section is. For -example, in a namelist, there might be a namelist record named -"&time_management". The script searches the directory listed with the -d -flag for a latex file named time_management.tex, and adds an input line to -the output latex documentation to include this file. - -The -p flag specifies the relative path inside the latex documentation where -the file should be input from. As an example, one might -run it as follows to generate the ocean core's documentation:: - - ./parse_xml_registry.xml -f mpas_root/src/core_ocean/Registry.xml \ - -d mpas_doc_root/users_guide/ocean/section_descriptions \ - -p ocean/section_descriptions - -On output, several files are created which are listed below. - namelist.input.generated - A default namelist.input file for the core that - owns the Registry.xml file. - dimensions.tex - A tabulated description of the dimensions for the core. - namelist_table_documentation.tex - A tabulated description of the namelist - options for the core. 
- namelist_section_documentation.tex - A more detailed section format - description of the namelist options - for the core. - variable_table_documentation.tex - A tabulated description of the variables - in the core. - variable_section_documentation.tex - A more detailed section formate - description of the variable in the - core. - define_version.tex - A simple file which can be included to define \version - inside the users guide. - -Authors: -======== -Doug Jacobsen, Xylar Asay-Davis -""" - - -import os -from optparse import OptionParser -import xml.etree.ElementTree as ET -from collections import OrderedDict -from PIL import ImageFont -import pkg_resources - - -def break_string(string, maxLength=150., font='cmunrm.otf', fontSize=10): - # {{{ - - # Note: max_length is in points, so 144. corresponds to 2 inches, the - # column width for namelist and variable names in tables in the user guide - - # font defaults to LaTex font (Computer Modern), and user guide font size - # in tables - - # if an absolute path to the font was not supplied, look relative to this - # script - if not os.path.isabs(font): - font = pkg_resources.resource_filename(__name__, font) - - font = ImageFont.truetype(font, fontSize) - size = font.getsize(string) - if size[0] <= maxLength: - # no need to split - return None - - bestBreakPoints = [] - - # first alpha-numeric character after a non-alpha-numeric character - for index in range(1, len(string)): - if not string[index-1].isalnum() and string[index].isalnum(): - bestBreakPoints.append(index) - - # find uppercase following lowercase or number - for index in range(1, len(string)): - if string[index-1].isalnum() and string[index-1].islower() \ - and string[index].isalpha() and string[index].isupper(): - bestBreakPoints.append(index) - - bestBreakPoints.append(len(string)) - - bestBreakPoints = sorted(bestBreakPoints) - - for index in range(1, len(bestBreakPoints)): - breakPoint = bestBreakPoints[index] - size = font.getsize(string[:breakPoint]) - if size[0] > maxLength: - breakPoint = bestBreakPoints[index-1] - return breakPoint - - # there is no good break point so we have to find an arbitrary one - print "Warning: no good breakpoint found for {}".format(string) - for breakPoint in range(1, len(string)+1): - breakPoint = bestBreakPoints[index] - size = font.getsize(string[:breakPoint]) - if size[0] > maxLength: - breakPoint = breakPoint-1 - return breakPoint - - raise ValueError("Could not find a breakpoint for {}".format(string)) - # }}} - - -def write_namelist_input_generated(): - # Write default namelist - namelist = open('namelist.input.generated', 'w') - for nml_rec in registry.iter("nml_record"): - namelist.write('&%s\n' % nml_rec.attrib['name']) - for nml_opt in nml_rec.iter("nml_option"): - if nml_opt.attrib['type'] == "character": - namelist.write('\t%s = "%s"\n' % ( - nml_opt.attrib['name'], - nml_opt.attrib['default_value'])) - else: - namelist.write('\t%s = %s\n' % ( - nml_opt.attrib['name'], - nml_opt.attrib['default_value'])) - - namelist.write('/\n') - - -def escape_underscore(string): - has_math_mode = (string.find('$') != -1) - if has_math_mode: - dim_desc_split = string.split("$") - replace = True - string = "" - for part in dim_desc_split: - if replace: - part = part.replace('_', '\_') - string = "{}{}".format(string, part) - replace = False - else: - string = "{}${}$".format(string, part) - replace = True - else: - string = string.replace('_', '\_') - return string - - -def get_attrib(element, attributeName, missingValue=None): - if missingValue 
is None: - missingValue = latex_missing_string - try: - attrib = element.attrib[attributeName] - except KeyError: - attrib = missingValue - if attrib == "": - attrib = missingValue - return attrib - - -def get_units(element): - units = get_attrib(element, 'units') - if units != latex_missing_string: - # units with the siunitx package - units = "\si{{{}}}".format(units.replace(' ', '.')) - units = escape_underscore(units) - return units - - -def get_description(element): - description = get_attrib(element, 'description') - description = escape_underscore(description) - return description - - -def get_linked_name(name, link): - indices = [] - index = 0 - while True: - newIndex = break_string(name[index:]) - if newIndex is None: - break - index += newIndex - indices.append(index) - - indices.append(len(name)) - newName = escape_underscore(name[0:indices[0]]) - for start, end in zip(indices[0:-1], indices[1:]): - namePiece = escape_underscore(name[start:end]) - newName = '{}\\-{}'.format(newName, namePiece) - - return '\hyperref[subsec:%s]{%s}' % (link, newName) - - -def write_var_struct_to_table(latex, var_struct, struct_name): - for node in var_struct: - if node.tag == 'var_struct': - write_var_struct_to_table(latex, node, struct_name) - elif node.tag == 'var_array': - write_var_array_to_table(latex, node, struct_name) - elif node.tag == 'var': - write_var_to_table(latex, node, struct_name) - - -def write_var_array_to_table(latex, var_array, struct_name): - for var in var_array.iter("var"): - write_var_to_table(latex, var, struct_name) - - -def write_var_to_table(latex, var, struct_name): - var_name = var.attrib['name'] - var_description = get_description(var) - - link = 'var_sec_{}_{}'.format(struct_name, var_name) - linkedName = get_linked_name(var_name, link) - - latex.write(' {} & {} \\\\\n'.format(linkedName, - var_description)) - latex.write(' \hline\n') - - -def get_var_structs(): - # use a dictionary to create lists of all top-level var_structs with the - # same name (e.g. 
state, tracers, mesh) - var_structs = OrderedDict() - for var_struct in registry: - if var_struct.tag != "var_struct": - continue - struct_name = var_struct.attrib['name'] - if struct_name in var_structs.keys(): - var_structs[struct_name].append(var_struct) - else: - var_structs[struct_name] = [var_struct] - return var_structs - - -def write_var_struct_section(latex, var_struct, struct_name, has_time): - for node in var_struct: - if node.tag == 'var_struct': - write_var_struct_section(latex, node, struct_name, has_time) - elif node.tag == 'var_array': - write_var_array_section(latex, node, struct_name, has_time) - elif node.tag == 'var': - write_var_section(latex, node, struct_name, has_time) - - -def write_var_array_section(latex, var_array, struct_name, has_time): - for var in var_array.iter("var"): - write_var_section(latex, var, struct_name, has_time, var_array) - - -def write_var_section(latex, var, struct_name, has_time, var_array=None): - var_name = var.attrib['name'] - var_name_escaped = escape_underscore(var_name) - if var_array is None: - var_type = var.attrib['type'] - dimensions = var.attrib['dimensions'] - else: - var_arr_name = escape_underscore(var_array.attrib['name']) - var_type = var_array.attrib['type'] - dimensions = var_array.attrib['dimensions'] - - persistence = get_attrib(var, "persistence", missingValue='persistent') - name_in_code = get_attrib(var, "name_in_code", missingValue=var_name) - units = get_units(var) - description = get_description(var) - - if has_time: - var_path = "domain % blocklist % {} % time_levs(:) % {} % {}".format( - struct_name, struct_name, var_name) - else: - var_path = "domain % blocklist % {} % {}".format(struct_name, var_name) - - var_path = escape_underscore(var_path).replace('%', '\%') - - latex.write('\subsection[%s]{\hyperref[sec:var_tab_%s]{%s}}\n' % ( - var_name_escaped, struct_name, var_name_escaped)) - latex.write('\label{subsec:var_sec_%s_%s}\n' % (struct_name, var_name)) - # Tabular Format: - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} | p{4.0in} |}\n') - latex.write(' \hline \n') - latex.write(' Type: & %s \\\\\n' % var_type) - latex.write(' \hline \n') - latex.write(' Units: & %s \\\\\n' % units) - latex.write(' \hline \n') - latex.write(' Dimension: & %s \\\\\n' % dimensions) - latex.write(' \hline \n') - latex.write(' Persistence: & %s \\\\\n' % (persistence)) - latex.write(' \hline \n') - - if var_array is not None: - array_group = escape_underscore(var.attrib['array_group']) - index = "domain % blocklist % {} % index_{}".format(struct_name, - name_in_code) - index = escape_underscore(index).replace('%', '\%') - - latex.write(' Index in %s Array: & %s \\\\\n' % (var_arr_name, - index)) - latex.write(' \hline \n') - - latex.write(' Location in code: & %s \\\\\n' % (var_path)) - latex.write(' \hline \n') - - if var_array is not None: - latex.write(' Array Group: & %s \\\\\n' % (array_group)) - latex.write(' \hline \n') - - latex.write(' \caption{%s: %s}\n' % (var_name_escaped, description)) - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - - -def write_dimension_table_documentation(): - # Write dimension table documentation latex file. 
- latex = open('dimension_table_documentation.tex', 'w') - latex.write('\chapter{Dimensions}\n') - latex.write('\label{chap:dimensions}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{1.0in} || p{1.0in} | p{4.0in} |}\n') - latex.write(' \hline \n') - latex.write(' {} \\endfirsthead\n'.format(dimension_table_header)) - latex.write(' \hline \n') - latex.write(' {} (Continued) \\endhead\n'.format( - dimension_table_header)) - latex.write(' \hline \n') - latex.write(' \hline \n') - for dims in registry.iter("dims"): - for dim in dims.iter("dim"): - name = dim.attrib['name'] - name = escape_underscore(name) - units = get_units(dim) - description = get_description(dim) - - latex.write(' {} & {} & {} \\\\ \n'.format( - name, units, description)) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') - latex.close() - - -def write_namelist_table_documentation(): - # Write namelist table documentation latex file. - latex = open('namelist_table_documentation.tex', 'w') - latex.write('\chapter[Namelist options]{\hyperref[chap:namelist_sections]' - '{Namelist options}}\n') - latex.write('\label{chap:namelist_tables}\n') - latex.write('Embedded links point to more detailed namelist information ' - 'in the appendix.\n') - for nml_rec in registry.iter("nml_record"): - rec_name = nml_rec.attrib['name'] - rec_name_escaped = escape_underscore(rec_name) - latex.write('\section[%s]{\hyperref[sec:nm_sec_%s]{%s}}\n' % ( - rec_name_escaped, rec_name, rec_name_escaped)) - latex.write('\label{sec:nm_tab_%s}\n' % (rec_name)) - - # Add input line if file exists. - if os.path.exists('%s/%s.tex' % (options.latex_dir, rec_name)): - latex.write('\input{%s/%s.tex}\n' % (options.latex_path, rec_name)) - else: - print 'Warning, namelist description latex file not found: ' \ - '%s/%s.tex' % (options.latex_dir, rec_name) - latex.write('') - - latex.write('\\vspace{0.5in}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} || p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' %s \\endfirsthead\n' % namelist_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n' % namelist_table_header) - latex.write(' \hline\n') - latex.write(' \hline\n') - - for nml_opt in nml_rec.iter("nml_option"): - name = nml_opt.attrib['name'] - - description = get_description(nml_opt) - - link = 'nm_sec_{}'.format(name) - linkedName = get_linked_name(name, link) - - latex.write(' {} & {} \\\\\n'.format(linkedName, description)) - latex.write(' \hline\n') - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') - latex.close() - - -def write_namelist_section_documentation(): - # Write namelist section documentation latex file. 
- latex = open('namelist_section_documentation.tex', 'w') - latex.write('\chapter[Namelist options]{\hyperref[chap:namelist_tables]' - '{Namelist options}}\n') - latex.write('\label{chap:namelist_sections}\n') - latex.write('Embedded links point to information in chapter ' - '\\ref{chap:namelist_tables}\n') - for nml_rec in registry.iter("nml_record"): - rec_name = nml_rec.attrib["name"] - rec_name_escaped = escape_underscore(rec_name) - - latex.write('\section[%s]{\hyperref[sec:nm_tab_%s]{%s}}\n' % ( - rec_name_escaped, rec_name, rec_name_escaped)) - latex.write('\label{sec:nm_sec_%s}\n' % rec_name) - - for nml_opt in nml_rec.iter("nml_option"): - name = nml_opt.attrib["name"] - name_escaped = escape_underscore(name) - opt_type = escape_underscore(nml_opt.attrib["type"]) - default_value = escape_underscore(get_attrib(nml_opt, - "default_value")) - possible_values = escape_underscore(get_attrib(nml_opt, - "possible_values")) - units = get_units(nml_opt) - description = get_description(nml_opt) - - try: - opt_icepack_name = nml_opt.attrib["icepack_name"] - except KeyError: - opt_icepack_name = None - - latex.write('\subsection[%s]{\hyperref[sec:nm_tab_%s]{%s}}\n' % ( - name_escaped, rec_name, name_escaped)) - latex.write('\label{subsec:nm_sec_%s}\n' % name) - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} || p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' Type: & %s \\\\\n' % opt_type) - latex.write(' \hline\n') - latex.write(' Units: & %s \\\\\n' % units) - latex.write(' \hline\n') - latex.write(' Default Value: & %s \\\\\n' % default_value) - latex.write(' \hline\n') - latex.write(' Possible Values: & %s \\\\\n' % possible_values) - latex.write(' \hline\n') - if (opt_icepack_name is not None): - latex.write(' Icepack name: & \\verb+%s+ \\\\\n' % - opt_icepack_name) - latex.write(' \hline\n') - latex.write(' \caption{%s: %s}\n' % (name_escaped, description)) - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.close() - - -def write_variable_table_documentation(): - - # Write variable table documentation latex file - latex = open('variable_table_documentation.tex', 'w') - latex.write('\chapter[Variable definitions]' - '{\hyperref[chap:variable_sections]' - '{Variable definitions}}\n') - latex.write('\label{chap:variable_tables}\n') - latex.write('Embedded links point to more detailed variable information ' - 'in the appendix.\n') - - var_structs = get_var_structs() - - for struct_name, var_struct_list in var_structs.items(): - struct_name_escaped = escape_underscore(struct_name) - latex.write('\section[%s]{\hyperref[sec:var_sec_%s]{%s}}\n' % ( - struct_name_escaped, struct_name, struct_name_escaped)) - latex.write('\label{sec:var_tab_%s}\n' % struct_name) - - if os.path.exists('%s/%s_struct.tex' % (options.latex_dir, - struct_name)): - latex.write('\input{%s/%s_struct.tex}\n' % (options.latex_path, - struct_name)) - else: - print 'Warning, variable section description latex file not ' \ - 'found: %s/%s_struct.tex' % (options.latex_dir, struct_name) - latex.write('') - - latex.write('\\vspace{0.5in}\n') - latex.write('{\small\n') - latex.write('\\begin{center}\n') - latex.write('\\begin{longtable}{| p{2.0in} | p{4.0in} |}\n') - latex.write(' \hline\n') - latex.write(' %s \\endfirsthead\n' % variable_table_header) - latex.write(' \hline \n') - latex.write(' %s (Continued) \\endhead\n' % variable_table_header) - latex.write(' \hline\n') - - for var_struct in var_struct_list: - write_var_struct_to_table(latex, var_struct, 
struct_name) - - latex.write('\end{longtable}\n') - latex.write('\end{center}\n') - latex.write('}\n') - latex.close() - - -def write_variable_section_documentation(): - - # Write variable section documentation latex file - latex = open('variable_section_documentation.tex', 'w') - latex.write('\chapter[Variable definitions]' - '{\hyperref[chap:variable_tables]' - '{Variable definitions}}\n') - latex.write('\label{chap:variable_sections}\n') - latex.write('Embedded links point to information in chapter ' - '\\ref{chap:variable_tables}\n') - - var_structs = get_var_structs() - - for struct_name, var_struct_list in var_structs.items(): - struct_name_escaped = escape_underscore(struct_name) - - latex.write('\section[%s]{\hyperref[sec:var_tab_%s]{%s}}\n' % ( - struct_name_escaped, struct_name, struct_name_escaped)) - latex.write('\label{sec:var_sec_%s}\n' % struct_name) - - for var_struct in var_struct_list: - try: - struct_time_levs = var_struct.attrib['time_levs'] - has_time = int(struct_time_levs) > 1 - except KeyError: - has_time = False - - write_var_struct_section(latex, var_struct, struct_name, has_time) - - latex.close() - - -parser = OptionParser() -parser.add_option("-f", "--file", dest="registry_path", - help="Path to Registry file", metavar="FILE") -parser.add_option("-d", "--tex_dir", dest="latex_dir", - help="Path to directory with latex addition files.", - metavar="DIR") -parser.add_option("-p", "--tex_path", dest="latex_path", - help="Path to latex input files that will be written to " - "generated latex.", metavar="PATH") - -options, args = parser.parse_args() - -if not options.registry_path: - parser.error("Registry file is required") - -if not options.latex_dir: - parser.error('Directory with group latex files is missing. Skipping ' - 'addition of latex files.') -if not options.latex_path: - parser.error('Need latex path with latex directory.') - -latex_missing_string = '{\\bf \color{red} MISSING}' -dimension_table_header = '{\\bf Name} & {\\bf Units} & {\\bf Description}' -variable_table_header = '{\\bf Name} & {\\bf Description}' -namelist_table_header = '{\\bf Name} & {\\bf Description}' - -registry_path = options.registry_path - -registry_tree = ET.parse(registry_path) - -registry = registry_tree.getroot() - -write_namelist_input_generated() - -# Write file that defines version string for model. -latex = open('define_version.tex', 'w') -try: - version_string = registry.attrib['version'] -except KeyError: - version_string = '{\\bf MISSING}' -latex.write('\\newcommand{\\version}{%s}\n' % version_string) -latex.close() - -write_dimension_table_documentation() - -write_namelist_table_documentation() - -write_namelist_section_documentation() - -write_variable_table_documentation() - -write_variable_section_documentation() diff --git a/python_scripts/update_version_numbers/README b/python_scripts/update_version_numbers/README deleted file mode 100644 index 784eb045b..000000000 --- a/python_scripts/update_version_numbers/README +++ /dev/null @@ -1,25 +0,0 @@ -Author: Doug Jacobsen -Date: 05/02/2013 - -About: - This script (update_versions.py) is used to increment version numbers in - the MPAS Core Registry.xml files. - - It can be run from the root MPAS directory (with the root Makefile). - - It searches the current directory for all files with .xml extensions. Then - extracts the version number from each of them and increments the - appropriate version number. Then re-writes the file. 
- - To script can be run as follows: - ./update_versions.py [--major] [--minor] - - The --major and --minor flags are optional. If both are omitted, the script - does nothing. - - The --major flag increments the major version number by 1, and resets the - minor version number to zero. - - The --minor flag increments the minor version number by 1, and ignores the - major version number. - diff --git a/python_scripts/update_version_numbers/update_versions.py b/python_scripts/update_version_numbers/update_versions.py deleted file mode 100755 index c8f7b037d..000000000 --- a/python_scripts/update_version_numbers/update_versions.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/python -from optparse import OptionParser -import xml.etree.ElementTree as ET -import os - - -parser = OptionParser() -parser.add_option("--major", action="store_true", dest="major", help="Increment Major Version (Auto-resets minor version.") -parser.add_option("--minor", action="store_true", dest="minor", help="Increment Minor Version.") - -options, args = parser.parse_args() - -if not options.major and not options.minor: - parser.error('Either major or minor version is required.') - -for r, d, f in os.walk("."): - for files in f: - if files.endswith(".xml"): - path = os.path.join(r, files) - registry_tree = ET.parse(path) - registry = registry_tree.getroot() - version = registry.attrib['version'] - version = version.split('.') - major_ver = int(version[0]) - minor_ver = int(version[1]) - - if options.major: - new_major_ver = major_ver + 1 - new_minor_ver = 0 - elif options.minor: - new_major_ver = major_ver - new_minor_ver = minor_ver + 1 - - print "%s version: %d.%d"%(path, new_major_ver, new_minor_ver) - - registry_file = open(path, 'r+') - - lines = registry_file.readlines() - registry_file.seek(0) - registry_file.truncate() - for line in lines: - if 'version="%d.%d"'%(major_ver,minor_ver) in line: - if 'xml' in line: - new_line = line - else: - new_line = line.replace('%d.%d'%(major_ver, minor_ver), '%d.%d'%(new_major_ver, new_minor_ver)) - else: - new_line = line - registry_file.write(new_line) - elif files == "README.md": - path = os.path.join(r, files) - readme_file = open(path, 'r+') - - lines = readme_file.readlines() - readme_file.seek(0) - readme_file.truncate() - - for line in lines: - if line.find('MPAS-v') >= 0: - version_num = line.replace('MPAS-v', '') - version_array = version_num.split('.') - major_ver = int(version_array[0]) - minor_ver = int(version_array[1]) - - if options.major: - new_major_ver = major_ver + 1 - new_minor_ver = 0 - elif options.minor: - new_major_ver = major_ver - new_minor_ver = minor_ver + 1 - - print "%s version: %d.%d"%(path, new_major_ver, new_minor_ver) - - readme_file.write(line.replace('v%d.%d'%(major_ver, minor_ver), 'v%d.%d'%(new_major_ver, new_minor_ver))) - else: - readme_file.write(line) - diff --git a/readthedocs.yml b/readthedocs.yml new file mode 100644 index 000000000..afe4968d0 --- /dev/null +++ b/readthedocs.yml @@ -0,0 +1,31 @@ +# .readthedocs.yml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Build documentation in the conda_package/docs/ directory with Sphinx +sphinx: + configuration: conda_package/docs/conf.py + +# Optionally build your docs in additional formats such as PDF and ePub +# Build PDF +formats: + - pdf + +# Optionally set the version of Python and requirements required to build your docs +python: + version: 3.7 + install: + - method: setuptools + path: 
conda_package/ + system_packages: true + +conda: + environment: conda_package/docs/environment.yml + +build: + image: latest + + diff --git a/seaice/README b/seaice/README new file mode 100644 index 000000000..ae26b3447 --- /dev/null +++ b/seaice/README @@ -0,0 +1 @@ +Readme file for MPAS-Tools seaice directory. diff --git a/python_scripts/acme_namelist_file_generator/generate_acme_namelist_files.py b/source_code_processing/acme_namelist_file_generator/generate_acme_namelist_files.py similarity index 100% rename from python_scripts/acme_namelist_file_generator/generate_acme_namelist_files.py rename to source_code_processing/acme_namelist_file_generator/generate_acme_namelist_files.py diff --git a/python_scripts/mpas_source_linter/mpas_source_linter.py b/source_code_processing/mpas_source_linter/mpas_source_linter.py similarity index 100% rename from python_scripts/mpas_source_linter/mpas_source_linter.py rename to source_code_processing/mpas_source_linter/mpas_source_linter.py diff --git a/visualization/cross_section/ColdHot.m b/visualization/cross_section/ColdHot.m deleted file mode 100644 index c93e0629f..000000000 --- a/visualization/cross_section/ColdHot.m +++ /dev/null @@ -1,44 +0,0 @@ -function B = ColdHot(m) -% A colormap for blue cold, white zero, Hot positives. - -if nargin < 1, m = 256; end - -n = fix(m/8); - -% Create cold part: -A = [ - 102 0 102; - 0 41 253; - 102 153 255; - 41 255 255; - 255 255 255]/255; -%A = ones(size(A)) - A; - -v = [n-1 n n n]; - -cold = linspacev(A,v); - -% Create hot part: -A = [ - 255 255 255; - 255 255 0; - 255 102 41; - 255 0 0; - 102 41 0]/255; - -v = [n n n n-1]; -myhot = linspacev(A,v); - - -B = [cold; myhot]; - -%B = [B; flipud(hot(fix(m/2)))]; - - -% Original cold part, 8/2/02: -A = [ - 102 0 102; - 41 0 153; - 0 0 204; - 42 102 255; - 255 255 255]/255; \ No newline at end of file diff --git a/visualization/cross_section/cross_section.m b/visualization/cross_section/cross_section.m deleted file mode 100644 index b6cf8bb10..000000000 --- a/visualization/cross_section/cross_section.m +++ /dev/null @@ -1,219 +0,0 @@ -%function cross_section - -% Plot cross-sections of MPAS fields. -% -% This is the main function, where the user can specify data files, -% coordinates and text, then call functions to find sections, load -% data, and plot cross-sections. -% -% The final product is a set of plots as jpg files, a latex file, -% and a compiled pdf file of the plots, if desired. -% -% Mark Petersen, MPAS-Ocean Team, LANL, March 2014 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify data files -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% all plots are placed in the f directory. Comment out if not needed. -unix('mkdir -p f docs'); - -% The text string [wd '/' sim(i).dir '/' sim(i).netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. - -wd = '/var/tmp/mpeterse/runs'; - -% These files only need to contain a small number of variables. -% You may need to reduce the file size before copying to a local -% machine using, and also add the variables you want to plot. 
-% ncks -v nAverage,latVertex,lonVertex,verticesOnEdge,edgesOnVertex,refLayerThickness,dvEdge,latCell,lonCell,refBottomDepth,areaCell,xCell,yCell,zCell,xVertex,yVertex,zVertex,cellsOnVertex,maxLevelCell \ -% file_in.nc file_out.nc - -sim(1).dir = 'm91a'; -sim(1).netcdf_file = ['output.0010-02-01_00.00.00.nc_transport_vars.nc']; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% see exampleSections.m for more example sections. - -% sectionText a cell array with text describing each section -sectionText = { -'ACC 0E lon',... -'ACC 30E lon',... -'ACC 60E lon',... -'ACC 90E lon',... -'Drake Pass 65W lon',... -'ACC Tasman 147E lon',... - }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - -67 0 -35 0;... % S Oc Tas - -67 30 -35 30;... % S Oc Tas - -67 60 -35 60;... % S Oc Tas - -67 90 -35 90;... % S Oc Tas - -65 -65 -55 -65;... % Drake - -67 147 -43.5 147;... % S Oc Tas - ]; -nSections = size(coord,1); - -% number of points to plot for each figure -nPoints = 300; - -% plotDepth(nSections) depth to which to plot each section, in m -plotDepth = 5000*ones(1,size(coord,1)); - -% For plotting, only four plots are allowed per row. -% Choose sections above for each page. -% page.name name of this group of sections -% sectionID section numbers for each row of this page -page(1).name = 'ACC'; -page(1).sectionID = [1:nSections]; - -% coord range may need alteration to match lonVertex: -% If lonVertex is between 0 and 2*pi, ensure the coordinate range is 0 to 360. -%coord(:,2) = mod(coord(:,2),360); -%coord(:,4) = mod(coord(:,4),360); -% If lonVertex is between -pi and pi, ensure the coordinate range is -180 to 180. -coord(:,2) = mod(coord(:,2)+180,360)-180; -coord(:,4) = mod(coord(:,4)+180,360)-180; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables to view -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% see exampleSections.m for more example variables - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% var_lims(nVars,3) contour line definition: min, max, interval - -var_name = {... -'avgVelocityZonal',... -'avgVelocityMeridional',... -'keAvgVelocity'}; -var_conv_factor = [100 100 1]; % convert m/s to cm/s for velocities -var_lims = [-20 20 2.0; -10 10 1.0; 0 20 2.5]; - -var_name = {... -'temperature',... -}; -var_conv_factor = [1]; -var_lims = [-1 14 1]; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify actions to be taken -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -find_cell_weights_flag = true ; -plot_section_locations_flag = true ; -load_large_variables_flag = true ; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Begin main code. Normally this does not need to change. 
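
The longitude-range adjustment above is plain modular arithmetic: mod(lon+180,360)-180 folds any longitude into [-180, 180), matching a lonVertex stored in [-pi, pi], while mod(lon,360) folds it into [0, 360) for a lonVertex stored in [0, 2*pi]. A quick illustration with made-up values, written in Python/NumPy rather than MATLAB purely for compactness:

    import numpy as np

    lon = np.array([283.6, 147.0, -65.0, 370.0])   # made-up longitudes in degrees

    # mod(lon + 180, 360) - 180  maps any longitude into [-180, 180)
    print((lon + 180.0) % 360.0 - 180.0)   # -> roughly [-76.4  147.  -65.   10.]

    # mod(lon, 360)              maps any longitude into [0, 360)
    print(lon % 360.0)                     # -> [283.6  147.  295.   10.]
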
-% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -for iSim = 1:length(sim) - - fprintf(['**** simulation: ' sim(iSim).dir '\n']) - fid_latex = fopen('temp.tex','w'); - fprintf(fid_latex,['%% file created by plot_mpas_cross_sections, ' date '\n\n']); - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Find cells that connect beginning and end points of section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if find_cell_weights_flag - [sim(iSim).cellsOnVertexSection, sim(iSim).cellWeightsSection, ... - sim(iSim).latSection,sim(iSim).lonSection, ... - refMidDepth, refBottomDepth, sim(iSim).maxLevelCellSection] ... - = find_cell_weights(wd,sim(iSim).dir,sim(iSim).netcdf_file, ... - sectionText,coord,nPoints); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot cell section locations on world map - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if plot_section_locations_flag - sub_plot_section_locations(sim(iSim).dir,coord, ... - sim(iSim).latSection,sim(iSim).lonSection,fid_latex) - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Load large variables from netcdf file - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if load_large_variables_flag - [sim(iSim).sectionData] = load_large_variables ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, var_name, var_conv_factor, ... - sim(iSim).cellsOnVertexSection, sim(iSim).cellWeightsSection); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot data on cross-sections - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - for iPage = 1:length(page) - - sub_plot_cross_sections(sim(iSim).dir,sim(iSim).netcdf_file,sectionText, ... - page(iPage).name, page(iPage).sectionID,sim(iSim).sectionData,refMidDepth,refBottomDepth,... - sim(iSim).latSection,sim(iSim).lonSection,sim(iSim).maxLevelCellSection,coord,plotDepth,... - var_name,var_lims,fid_latex) - - end % iPage - fprintf(fid_latex,['\n\\end{document}\n\n']); - fclose(fid_latex); - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Latex Compilation -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% This matlab script will invoke a latex compiler in order to -% produce a pdf file. Specify the unix command-line latex -% executable, or 'none' to not compile the latex document. - -% latex_command = 'latex'; - -% doc_dir = ['docs/' regexprep(sim(iSim).dir,'/','_') '_' ... -% sim(iSim).netcdf_file '_dir' ]; -% unix(['mkdir -p ' doc_dir '/f']); -% unix(['mv f/*jpg ' doc_dir '/f']); - -% filename = [ regexprep(sim(iSim).dir,'/','_') '_' sim(iSim).netcdf_file '.tex']; -% unix(['cat mpas_sections.head.tex temp.tex > ' doc_dir '/' filename ]); - -% if not(strcmp(latex_command,'none')) -% fprintf('*** Compiling latex document \n') -% cd(doc_dir); -% unix([latex_command ' ' filename]); -% cd('../..'); -% end - -end % iSim - - diff --git a/visualization/cross_section/exampleSections.m b/visualization/cross_section/exampleSections.m deleted file mode 100644 index e6302c10f..000000000 --- a/visualization/cross_section/exampleSections.m +++ /dev/null @@ -1,143 +0,0 @@ -% example_sections.m - -% This file simply contains example cross sections with text names. 
-% -% Mark Petersen, MPAS-Ocean Team, LANL, March 2014 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% sectionText a cell array with text describing each section -sectionText = { -'N Atlantic 26N lat',... -'N Atlantic 36N lat',... -'N Atlantic 41N lat',... -'N Atlantic 46N lat',... -'N Atlantic 56N lat',... -'N Atlantic 70W lon',... -'N Atlantic 65W lon',... -'N Atlantic 60W lon',... -'N Atlantic 50W lon',... -'N Atlantic 40W lon',... -'N Atlantic 30W lon',... -'Eq Pacific 140W lon',... -'Eq Pacific 0 lat ',... -'Drake Pass 65W lon',... -'ACC Tasman 147E lon',... - }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - 26 -80 26 -15;... % N Atl Zonal - 36 -76 36 -10;... % N Atl Zonal - 41 -72 41 -10;... % N Atl Zonal - 46 -60 46 -10;... % N Atl Zonal - 56 -60 56 -10;... % N Atl Zonal - 20 -70 44 -70;... % N Atl Meridional - 19 -65 44 -65;... % N Atl Meridional - 8.5 -60 46 -60;... % N Atl Meridional - 1.8 -50 62 -50;... % N Atl Meridional - -3 -40 65 -40;... % N Atl Meridional - -5 -30 68.2 -30;... % N Atl Meridional - -8 -140 8 -140;... % Eq Pac Meridional - 0 140 0 -95;... % Eq Pac Zonal - -65 -65 -55 -65;... % Drake - -67 147 -43.5 147;... % S Oc Tas - ]; - - -% These are the cross-sections for the current plots: -% sectionText a cell array with text describing each section -sectionText = { -'N Atlantic 70W lon',... -'N Atlantic 65W lon',... -'N Atlantic 60W lon',... -'N Atlantic 50W lon',... -'N Atlantic 40W lon',... -'N Atlantic 30W lon',... - }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - 20 -70 44 -70;... % N Atl Meridional - 19 -65 44 -65;... % N Atl Meridional - 8.5 -60 46 -60;... % N Atl Meridional - 1.8 -50 62 -50;... % N Atl Meridional - -3 -40 65 -40;... % N Atl Meridional - -5 -30 68.2 -30;... % N Atl Meridional - ]; - -sectionText = { -'N Atlantic 77W lon',... -'N Atlantic 76.4W lon',... -'N Atlantic 76W lon',... -'N Atlantic 75W lon',... -'N Atlantic 26N lat',... -'N Atlantic 26N lat',... - }; - -coord = [... - 21 283. 32 283.;... % DWBC N Atl meridional section - 21 283.6 32 283.6;... % DWBC N Atl meridional section - 21 284 32 284;... % DWBC N Atl meridional section - 21 285 32 285;... % DWBC N Atl meridional section - 26.5 -77.1 26.5 -75;... % DWBC N Atl zonal section - 26.5 -80 26.5 -14;... % DWBC N Atl zonal section - ]; - -% sectionText a cell array with text describing each section -sectionText = { -'ACC 0E lon',... -'ACC 30E lon',... -'ACC 60E lon',... -'ACC 90E lon',... -'Drake Pass 65W lon',... -'ACC Tasman 147E lon',... - }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - -67 0 -43.5 0;... % S Oc Tas - -67 30 -43.5 30;... % S Oc Tas - -67 60 -43.5 60;... % S Oc Tas - -67 90 -43.5 90;... % S Oc Tas - -65 -65 -55 -65;... % Drake - -67 147 -43.5 147;... 
% S Oc Tas - ]; - - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables to view -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% var_lims(nVars,3) contour line definition: min, max, interval - -var_name = {... -'avgVelocityZonal',... -'avgVelocityMeridional',... -'ke_fromAvgVelocity'}; -var_conv_factor = [100 100 1]; % convert m/s to cm/s for velocities -var_lims = [-20 20 2.0; -10 10 1.0; 0 20 2.5]; - -var_name = {... -'temperature',... -}; -var_conv_factor = [1]; -var_lims = [-2 12 .5]; diff --git a/visualization/cross_section/find_cell_weights.m b/visualization/cross_section/find_cell_weights.m deleted file mode 100644 index bf9779059..000000000 --- a/visualization/cross_section/find_cell_weights.m +++ /dev/null @@ -1,165 +0,0 @@ -function [cellsOnVertexSection, cellWeightsSection, latSection,lonSection, ... - refMidDepth, refBottomDepth, maxLevelCellSection] = find_cell_weights ... - (wd,dir,netcdf_file,sectionText,coord,nPoints) - -% This function reads grid data from an MPAS-Ocean grid or restart -% netCDF file, and finds a path of cells that connect the endpoints -% specified in coord. The path is forced to travel through cells -% that are closest to the line connecting the beginning and end -% cells. -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. -% sectionText a cell array with text describing each section -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% -%%%%%%%%%% output arguments %%%%%%%%% -% cellsOnVertexSection(vertexDegree,nPoints,nSections) cells neighboring nearest vertex -% cellWeightsSection(vertexDegree,nPoints,nSections) weights for each cell -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% depth(nVertLevels) depth of center of each layer, for plotting -% latCellDeg(nCells) lat arrays for all cells -% lonCellDeg(nCells) lon arrays for all cells - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read data from file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** find_cell_sections, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ] -ncid = netcdf.open(filename,'nc_nowrite'); - -xCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'xCell')); -yCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'yCell')); -zCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'zCell')); -latVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latVertex')); -lonVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonVertex')); -xVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'xVertex')); -yVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'yVertex')); -zVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'zVertex')); -cellsOnVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'cellsOnVertex')); -refLayerThickness = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refLayerThickness')); -refBottomDepth = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refBottomDepth')); -maxLevelCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'maxLevelCell')); -sphere_radius = 
netcdf.getAtt(ncid,netcdf.getConstant('NC_GLOBAL'),'sphere_radius'); -[dimname,nVertices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertices')); -[dimname,vertexDegree]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'vertexDegree')); -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); -netcdf.close(ncid) - -nSections = size(coord,1); - -% Compute depth of center of each layer, for plotting -refMidDepth(1) = refLayerThickness(1)/2; -for i=2:nVertLevels - refMidDepth(i) = refMidDepth(i-1) + 0.5*(refLayerThickness(i) + refLayerThickness(i-1)); -end - -latSection = zeros(nPoints,nSections); -lonSection = zeros(nPoints,nSections); -latVertexSection = zeros(nPoints,nSections); -lonVertexSection = zeros(nPoints,nSections); -maxLevelCellSection = zeros(nPoints,nSections); -nearestVertexSection = zeros(nPoints,nSections); -cellsOnVertexSection = zeros(vertexDegree,nPoints,nSections); -cellWeightsSection = zeros(vertexDegree,nPoints,nSections); -margin=.5; - -for iSection=1:nSections - fprintf('Finding nearest vertex for Section %g \n',iSection) - latSection(:,iSection) = linspace(coord(iSection,1),coord(iSection,3),nPoints); - lonSection(:,iSection) = linspace(coord(iSection,2),coord(iSection,4),nPoints); - - maxLat = (max(latSection(:,iSection))+margin)*pi/180; - minLat = (min(latSection(:,iSection))-margin)*pi/180; - maxLon = (max(lonSection(:,iSection))+margin)*pi/180; - minLon = (min(lonSection(:,iSection))-margin)*pi/180; - - vInd = find(latVertex>minLat&latVertexminLon&lonVertex=cvalue(jnew))),nc)-1,1); - cmapnew(jnew-1,:) = cmap_orig_short(jold,:); - end - cmapnew(nc_inc,:) = cmap_orig_short(nc-1,:); - - colormap(cmapnew) - %colorbarf_spec(cout,h,'vert',contour_lims); - %xlabel('Distance (km) along 26.5N east of 77W') % for DWBC only - - axis tight - %set(gca,'YLim',[0 plotDepth(iSection)],'XLim',[0 175]) % for DWBC only - %set(gca,'YTick',[0:1000:5000],'XTick',[0:25:175]) - set(gca,'YLim',[0 plotDepth(iRow)]) - %set(gca,'YTick',[0:100:400]) - xlabel(xtext) - % set(gca,'XTick',-1*[80:.5:70]) - %%%%%% special commands for DWBC mrp end - - %%%%%% special commands for EUC mrp end - %if iRow==2 - % set(gca,'XTick',[143 156 165 180 190 205 220 235 250 265]) - % set(gca,'XTickLabel',{'143' '156' '165E' '180' '170W' '155' '140' '125' '110' '95'}) - %end - - %%%%%% special commands for EUC mrp end - - set(gca,'YDir','reverse') - title([temptext ', ' char(var_name(iVar))]) - ylabel('depth, m') - grid on - set(gca,'layer','top'); - h=colorbar; - - % mrp draw bottom based on zero contour - hold on - n = nPoints; - % old way: maxLevelCell=zeros(1,n); - x(2:n) = (xaxis(1:n-1)+xaxis(2:n))/2; - x(1) = xaxis(1) - (xaxis(2)-xaxis(1))/2; - x(n+1) = xaxis(n) + (xaxis(n)-xaxis(n-1))/2; - b = max(refBottomDepth); - for j=1:n - % old way: maxLevelCell(j)=max(min(find(sectionData(:,j,iSection,iVar)==0.0))-1,1); - depthline(j) = refBottomDepth(maxLevelCellSection(j,iSection)); - % do not draw land at bottom right now. - %h=patch([x(j) x(j+1) x(j+1) x(j) x(j)],... - % [b b depthline(j) depthline(j) b], [.5 .5 .5]); - %set(h,'LineStyle','none') - end - - % mrp draw bottom based on zero contour end - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... 
- 'PaperPosition',[0.25 0.25 5.5 3.2]) - subplot('position',[0 .95 1 .05]); axis off - title_txt = [regexprep(char(var_name(iVar)),'_','\\_') ', ' regexprep(dir,'_','\\_')]; -% h=text(.55,.4,title_txt); -% set(h,'HorizontalAlignment','center','FontWeight','bold','FontSize',14) -% text(.005,.7,[ date ]); - - unix(['mkdir -p f/' dir ]); - temp=['f/' dir '/' netcdfFile '_' pageName num2str(iRow) '_var' num2str(iVar)]; - filename = regexprep(temp,'\.','_'); - print('-djpeg',[filename '.jpg']); - print('-depsc2',[filename '.eps']); - unix(['epstopdf ' filename '.eps --outfile=' filename '.pdf']); - fprintf(fid_latex,['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... - filename '.jpg} \n\\end{figure} \n']); - - % print('-depsc2',[filename '.eps']) - - end - - -end - diff --git a/visualization/cross_section/sub_plot_section_locations.m b/visualization/cross_section/sub_plot_section_locations.m deleted file mode 100644 index f69137f4e..000000000 --- a/visualization/cross_section/sub_plot_section_locations.m +++ /dev/null @@ -1,97 +0,0 @@ -function sub_plot_section_locations(dir,coord, ... - latSection,lonSection,fid_latex) - -% Plot section locations on world map - -% Mark Petersen, MPAS-Ocean Team, LANL, Sep 2012 - -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% fid_latex file ID of latex file - -fprintf(['** sub_plot_cell_sections, on figure 1.\n']) - -nSections = size(coord,1); - -figure(1); clf - - minLon = -180.0; % may be 0 or -180 - latTrans = 360.0; - - % plot topo data of the earth. This is just low-rez one deg - % data for visual reference. - load('topo.mat','topo','topomap1'); - if minLon==-180 - topoNew(:,1:180) = topo(:,181:360); - topoNew(:,181:360) = topo(:,1:180); - image([-180 180],[-90 90],topoNew,'CDataMapping', 'scaled'); - else - image([0 360],[-90 90],topo,'CDataMapping', 'scaled'); - end - - colormap(topomap1); - set(gca,'YDir','normal') - - hold on - - % world - axis tight - set(gca,'XTick',30*[-10:12]) - set(gca,'YTick',15*[-20:20]) - - % half world -% axis([-360+latTrans 0+latTrans -80 70]) -% set(gca,'XTick',20*[-10:20]) -% set(gca,'YTick',10*[-20:20]) - - % N Atlantic -% axis([-90+latTrans -5+latTrans -5 70]) -% set(gca,'XTick',[-100:5:360]) -% set(gca,'YTick',[-90:5:90]) - - % Drake passage -% axis([-90+latTrans,-50+latTrans,-75,-50]) -% set(gca,'XTick',[-100:2:360]) - % set(gca,'YTick',[-200:2:200]) - - % Pacific -% axis([130 260 -10 10]) -% set(gca,'XTick',[0:1:300]) -% set(gca,'YTick',[-20:.1:20]) - - hold on - grid on - - for iSection=1:nSections - h=plot(lonSection(:,iSection),latSection(:,iSection),'y-'); - h=text(lonSection(1,iSection),latSection(1,iSection), ... - num2str(iSection)); - - set(h,'Color',[1 1 1],'FontWeight','bold') - %h=plot(lonSection(:,iSection),latSection(:,iSection),'y.'); - %set(h,'Color','y','LineWidth',1) - end - - ylabel('latitude') - xlabel('longitude') - title(['Domain: ' regexprep(dir,'_','\\_') ' Cells in cross sections. ']) - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... 
- 'PaperPosition',[0.25 0.25 8 8]) - - subplot('position',[0 .95 1 .05]); axis off - text(.005,.7,[ date ]); - - dir_name1 = regexprep(dir,'\.','_'); - dir_name2 = regexprep(dir_name1,'/','_'); - filename=['f/' dir_name2 '_cell_map' ]; - print('-djpeg',[filename '.jpg']) - - % put printing text in a latex file - fprintf(fid_latex,... - ['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... - filename '.jpg} \n\\end{figure} \n']); diff --git a/visualization/cross_section/triArea.m b/visualization/cross_section/triArea.m deleted file mode 100644 index 8734c8dc1..000000000 --- a/visualization/cross_section/triArea.m +++ /dev/null @@ -1,24 +0,0 @@ -function area=triArea(A,B,C,R) -% - This function calculates the area of the triangle A,B,C on the -% surface of a sphere. -% -% Input: A, B, C -% A: vertex 1 of triangle -% B: vertex 2 of triangle -% C: vertex 3 of triangle -% R: radius of sphere -% Output: (returned value area) -% area: surface area of triangle on sphere. - -R2inv = 1/R/R; - -a = acos(dot(B,C)*R2inv); -b = acos(dot(C,A)*R2inv); -c = acos(dot(A,B)*R2inv); - -s = 0.5*(a+b+c); - -tanqe = sqrt(tan(0.5*s)*tan(0.5*(s-a))*tan(0.5*(s-b))*tan(0.5*(s-c))); - -area = abs(4.0*atan(tanqe)); - diff --git a/visualization/dx/edge.dx b/visualization/dx/edge.dx deleted file mode 100644 index 9fe861ba3..000000000 --- a/visualization/dx/edge.dx +++ /dev/null @@ -1,18 +0,0 @@ -object "positions list" class array type float rank 1 shape 3 items 120000 -ascii data file edge.position.data - -object 0 class array type float rank 1 shape 3 items 120000 -ascii data file normal.data -attribute "dep" string "positions" - -object 1 class array type float rank 1 shape 3 items 120000 -ascii data file tangent.data -attribute "dep" string "positions" - -object "normal" class field -component "positions" "positions list" -component "data" 0 - -object "tangent" class field -component "positions" "positions list" -component "data" 1 diff --git a/visualization/dx/triangle.dx b/visualization/dx/triangle.dx deleted file mode 100644 index c1b454a2d..000000000 --- a/visualization/dx/triangle.dx +++ /dev/null @@ -1,172 +0,0 @@ - -object "positions list" class array type float rank 1 shape 3 items 240000 -ascii data file tri.position.data - -object "edge list" class array type int rank 0 items 240000 -ascii data file tri.edge.data -attribute "ref" string "positions" - -object "loops list" class array type int rank 0 items 80000 -ascii data file tri.loop.data -attribute "ref" string "edges" - -object "face list" class array type int rank 0 items 80000 -ascii data file tri.face.data -attribute "ref" string "loops" - -object 0 class array type float rank 0 items 80000 -data file tri.area.data -attribute "dep" string "faces" - -object 1 class array type float rank 0 items 80000 -data file ./output/divDT.data -attribute "dep" string "faces" - -object 2 class array type float rank 0 items 80000 -data file ./output/ke.data -attribute "dep" string "faces" - -object 3 class array type float rank 0 items 80000 -data file ./output/relDT.data -attribute "dep" string "faces" - -object 4 class array type float rank 0 items 80000 -data file ./output/thicknessc.data -attribute "dep" string "faces" - -object 5 class array type float rank 0 items 80000 -data file ./output/thicknesscdiff.data -attribute "dep" string "faces" - -object 6 class array type float rank 0 items 80000 -data file ./output/kediff.data -attribute "dep" string "faces" - -object 7 class array type float rank 0 items 80000 -data file ./output/relDTdiff.data 
-attribute "dep" string "faces" - -object 8 class array type float rank 0 items 80000 -data file ./output/residDT.data -attribute "dep" string "faces" - -object 9 class array type float rank 0 items 80000 -data file ./output/chi.data -attribute "dep" string "faces" - -object 10 class array type float rank 0 items 80000 -data file ./output/divAVG.data -attribute "dep" string "faces" - -object 11 class array type float rank 0 items 80000 -data file ./output/beta.data -attribute "dep" string "faces" - -object 12 class array type float rank 0 items 80000 -data file ./output/jtw.data -attribute "dep" string "faces" - -object 13 class array type float rank 0 items 80000 -data file ./output/pv.data -attribute "dep" string "faces" - - - -object "areac" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 0 - -object "divDT" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 1 - -object "ke" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 2 - -object "relDT" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 3 - -object "thicknessc" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 4 - -object "thicknesscdiff" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 5 - -object "kediff" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 6 - -object "relDTdiff" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 7 - -object "residDT" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 8 - -object "chi" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 9 - -object "divAVG" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 10 - -object "beta" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 11 - -object "jtw" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 12 - -object "pv" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 13 - diff --git a/visualization/dx/visual_vector.cfg b/visualization/dx/visual_vector.cfg deleted file mode 100644 index 8b8de65b5..000000000 --- a/visualization/dx/visual_vector.cfg +++ /dev/null @@ -1,266 +0,0 @@ -// -// time: 
Fri Aug 7 15:29:44 2009 -// -// version: 3.2.0 (format), 4.4.4 (DX) -// -// inaccessible panels: -// inaccessible groups: -// -// panel[0]: position = (0.0156,0.7275), size = 0.2375x0.1546, startup = 0, devstyle = 1, screen = 0 -// title: value = VD String -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[1]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[2]: position = (0.0437,0.0539), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[3]: position = (0.1344,0.2155), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[4]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[5]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[6]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[7]: position = (0.0051,0.0349), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[8]: position = (0.0051,0.0349), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[9]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[11]: position = (0.0051,0.0070), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[12]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[13]: position = (0.1562,0.5894), size = 0.1738x0.1629, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[14]: position = (0.0051,0.0209), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[15]: position = 
(0.0281,0.1356), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[16]: position = (0.2605,0.4563), size = 0.3328x0.4880, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[17]: position = (0.0059,0.4930), size = 0.2746x0.4392, startup = 1, devstyle = 1, screen = 0 -// title: value = Collected -// -// workspace: width = 430, height = 471 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// panel[18]: position = (0.2535,0.2668), size = 0.1953x0.3821, startup = 0, devstyle = 1, screen = 0 -// title: value = Control Panel -// -// workspace: width = 500, height = 500 -// layout: snap = 0, width = 50, height = 50, align = NN -// -// interactor FileSelector[2]: num_components = 1, value = "/Users/todd/Desktop/cgrid_model/run/dx/normals.dx" -// instance: panel = 2, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// instance: panel = 17, x = 0, y = 416, style = FileSelector, vertical = 1, size = 180x55 -// filter = /Users/todd/Desktop/cgrid_model/run/dx/*.dx -// -// interactor String[2]: num_components = 1, value = "velocity" -// instance: panel = 3, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// instance: panel = 17, x = 0, y = 134, style = Text, vertical = 1, size = 119x51 -// -// interactor Integer[5]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 9, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 300, y = 0, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[1]: num_components = 1, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/voronoi.dx" -// instance: panel = 1, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// instance: panel = 17, x = 4, y = 289, style = FileSelector, vertical = 1, size = 180x55 -// filter = /Users/todd/Desktop/svn/cvt/unstructured/run/40962.A.Cgrid/dx/*.dx -// -// interactor String[1]: num_components = 1, value = "ke.1.0" -// instance: panel = 0, x = 4, y = 14, style = Text, vertical = 1, size = 119x51 -// instance: panel = 16, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// instance: panel = 17, x = 0, y = 0, style = Text, vertical = 1, size = 119x51 -// -// interactor Integer[1]: num_components = 1, value = 1 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 8, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 150, y = 0, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// node Colormap[1]: -// input[1]: defaulting = 0, value = { [0.0 0.74683544] [1.0 0.0] } -// input[2]: defaulting = 0, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } -// input[3]: defaulting = 0, value = { 
[0.84699454 1.0] } -// input[4]: defaulting = 0, value = { [0.84972678 1.0] } -// input[5]: defaulting = 0, value = "Colormap_1" -// input[7]: defaulting = 1, value = 0.0 -// input[8]: defaulting = 1, value = 80737.0 -// input[9]: defaulting = 1, value = 20 -// input[12]: defaulting = 0, value = { 0.0 80737.0 } -// input[17]: defaulting = 0, value = 0.0 -// input[18]: defaulting = 0, value = 80737.0 -// window: position = (0.0605,0.4696), size = 0.4297x0.4284, screen = 0 -// -// interactor String[7]: num_components = 1, value = "white" -// instance: panel = 18, x = 5, y = 5, style = Text, vertical = 1, size = 126x51 -// instance: panel = 17, x = 304, y = 136, style = Text, vertical = 1, size = 126x51 -// -// interactor Integer[7]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 7, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[4]: num_components = 1, value = "/Users/todd/Desktop/svn/cvt/unstructured/run/40962.A.Dgrid/dx/topography.dx" -// instance: panel = 5, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// -// interactor String[4]: num_components = 1, value = "tpg" -// instance: panel = 6, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// -// interactor Integer[6]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 4, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 147, y = 134, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor Integer[10]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 14, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 300, y = 65, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// interactor FileSelector[6]: num_components = 1, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/triangle.dx" -// instance: panel = 12, x = 5, y = 5, style = FileSelector, vertical = 1, size = 180x55 -// instance: panel = 17, x = 0, y = 350, style = FileSelector, vertical = 1, size = 180x55 -// filter = /Users/todd/Desktop/cgrid_model/run/dx/*.dx -// -// interactor String[6]: num_components = 1, value = "vorticity.1.12" -// instance: panel = 13, x = 5, y = 5, style = Text, vertical = 1, size = 119x51 -// instance: panel = 17, x = 1, y = 66, style = Text, vertical = 1, size = 119x51 -// -// interactor Integer[9]: num_components = 1, value = 0 -// component[0]: minimum = -1e+06, maximum = 1e+06, global increment = 1, decimal = 0, global continuous = 0 -// instance: panel = 15, x = 5, y = 5, style = Stepper, vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// instance: panel = 17, x = 150, y = 69, style = Stepper, 
vertical = 1, size = 121x54 -// local continuous: value = 0, mode = global -// local increment[0]: value = 1, mode = global -// -// node Colormap[2]: -// input[1]: defaulting = 0, value = { [0.0 0.74683544] [1.0 0.0] } -// input[2]: defaulting = 0, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } -// input[3]: defaulting = 0, value = { [0.84699454 1.0] } -// input[4]: defaulting = 0, value = { [0.84972678 1.0] } -// input[5]: defaulting = 0, value = "Colormap_2" -// input[7]: defaulting = 1, value = -0.22230303 -// input[8]: defaulting = 1, value = 72107.0 -// input[9]: defaulting = 1, value = 20 -// input[12]: defaulting = 0, value = { -0.22230303 72107.0 } -// input[17]: defaulting = 0, value = -0.22230303 -// input[18]: defaulting = 0, value = 72107.0 -// window: position = (0.0605,0.4696), size = 0.4297x0.4284, screen = 0 -// -// node Image[2]: -// depth: value = 24 -// window: position = (0.2855,0.0399), size = 0.5680x0.8980, screen = 0 -// input[1]: defaulting = 0, value = "Image_2" -// input[4]: defaulting = 0, value = 1 -// input[5]: defaulting = 0, value = [0 0 0] -// input[6]: defaulting = 0, value = [0 -6.8363 0] -// input[7]: defaulting = 0, value = 2.50853 -// input[8]: defaulting = 0, value = 1440 -// input[9]: defaulting = 0, value = 0.953 -// input[10]: defaulting = 0, value = [0 0 1] -// input[11]: defaulting = 1, value = 20.793 -// input[12]: defaulting = 0, value = 0 -// input[14]: defaulting = 0, value = 1 -// input[15]: defaulting = 1, value = "none" -// input[16]: defaulting = 1, value = "none" -// input[17]: defaulting = 1, value = 1 -// input[18]: defaulting = 1, value = 1 -// input[19]: defaulting = 0, value = 0 -// input[22]: defaulting = 0, value = "black" -// input[25]: defaulting = 0, value = "/Users/todd/Desktop/ke.tiff" -// input[26]: defaulting = 0, value = "tiff gamma=1" -// input[27]: defaulting = 0, value = 750 -// input[28]: defaulting = 1, value = 1.0 -// input[29]: defaulting = 0, value = 0 -// input[30]: defaulting = 0, value = {"x axis", "y axis", ""} -// input[31]: defaulting = 0, value = { -15 -15 15 } -// input[34]: defaulting = 0, value = 1 -// input[37]: defaulting = 0, value = {"grey30", "grey5", "yellow", "white"} -// input[38]: defaulting = 0, value = {"background", "grid", "ticks", "labels"} -// input[39]: defaulting = 0, value = 0.5 -// input[41]: defaulting = 0, value = "none" -// internal caching: 1 diff --git a/visualization/dx/visual_vector.net b/visualization/dx/visual_vector.net deleted file mode 100644 index efddd0d38..000000000 --- a/visualization/dx/visual_vector.net +++ /dev/null @@ -1,1238 +0,0 @@ -// -// time: Wed Sep 9 16:22:00 2009 -// -// version: 3.2.0 (format), 4.4.4 (DX) -// -// -// MODULE main -// workspace: width = 999, height = 1204 -// layout: snap = 0, width = 50, height = 50, align = NN -// -macro main( -) -> ( -) { - // - // node FileSelector[2]: x = 414, y = 91, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/edge.dx" - // output[2]: visible = 1, type = 32, value = "edge.dx" - // - // - // node String[2]: x = 554, y = 91, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "normal" - // - // - // node Import[2]: x = 501, y = 182, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "geodesic.c40962.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "isurf" - // input[3]: defaulting = 0, visible = 1, type = 32, value = 
"dx" - // -main_Import_2_out_1 = - Import( - main_FileSelector_2_out_1, - main_String_2_out_1, - main_Import_2_in_3, - main_Import_2_in_4, - main_Import_2_in_5, - main_Import_2_in_6 - ) [instance: 2, cache: 1]; - // - // node AutoGlyph[2]: x = 483, y = 261, inputs = 7, label = AutoGlyph - // input[2]: defaulting = 0, visible = 1, type = 32, value = "standard" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // input[4]: defaulting = 0, visible = 1, type = 5, value = 0.1 - // input[5]: defaulting = 1, visible = 1, type = 5, value = 0.1 - // -main_AutoGlyph_2_out_1 = - AutoGlyph( - main_Import_2_out_1, - main_AutoGlyph_2_in_2, - main_AutoGlyph_2_in_3, - main_AutoGlyph_2_in_4, - main_AutoGlyph_2_in_5, - main_AutoGlyph_2_in_6, - main_AutoGlyph_2_in_7 - ) [instance: 2, cache: 1]; - // - // node Integer[5]: x = 61, y = 420, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_5" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node FileSelector[1]: x = 66, y = 17, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/voronoi.dx" - // output[2]: visible = 1, type = 32, value = "voronoi.dx" - // - // - // node String[1]: x = 187, y = 20, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "area" - // - // - // node Import[1]: x = 120, y = 93, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_1_out_1 = - Import( - main_FileSelector_1_out_1, - main_String_1_out_1, - main_Import_1_in_3, - main_Import_1_in_4, - main_Import_1_in_5, - main_Import_1_in_6 - ) [instance: 1, cache: 1]; - // - // node ShowConnections[2]: x = 65, y = 261, inputs = 1, label = ShowConnections - // -main_ShowConnections_2_out_1 = - ShowConnections( - main_Import_1_out_1 - ) [instance: 2, cache: 1]; - // - // node Color[3]: x = 102, y = 345, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "yellow" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_3_out_1 = - Color( - main_ShowConnections_2_out_1, - main_Color_3_in_2, - main_Color_3_in_3, - main_Color_3_in_4, - main_Color_3_in_5 - ) [instance: 3, cache: 1]; - // - // node Switch[7]: x = 138, y = 463, inputs = 2, label = Switch - // -main_Switch_7_out_1 = - Switch( - main_Integer_5_out_1, - main_Color_3_out_1 - ) [instance: 7, cache: 1]; - // - // node Integer[1]: x = 229, y = 448, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_1" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, 
type = 1, value = 0 - // - // - // node Colormap[1]: x = 254, y = 167, inputs = 19, label = Colormap - // input[1]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 0.74683544] [1.0 0.0] } - // input[2]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } - // input[3]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84699454 1.0] } - // input[4]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84972678 1.0] } - // input[5]: defaulting = 0, visible = 0, type = 32, value = "Colormap_1" - // input[7]: defaulting = 1, visible = 0, type = 5, value = 866025.38 - // input[8]: defaulting = 1, visible = 0, type = 5, value = 866025.38 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 20 - // input[12]: defaulting = 0, visible = 0, type = 16777221, value = { 866025.38 866025.38 } - // input[17]: defaulting = 0, visible = 0, type = 5, value = 866025.38 - // input[18]: defaulting = 0, visible = 0, type = 5, value = 866025.38 - // window: position = (0.0605,0.4696), size = 0.4297x0.4284, screen = 0 - // -main_Colormap_1_out_1[cache: 2], -main_Colormap_1_out_2[cache: 2] = - Colormap( - main_Colormap_1_in_1, - main_Colormap_1_in_2, - main_Colormap_1_in_3, - main_Colormap_1_in_4, - main_Colormap_1_in_5, - main_Import_1_out_1, - main_Colormap_1_in_7, - main_Colormap_1_in_8, - main_Colormap_1_in_9, - main_Colormap_1_in_10, - main_Colormap_1_in_11, - main_Colormap_1_in_12, - main_Colormap_1_in_13, - main_Colormap_1_in_14, - main_Colormap_1_in_15, - main_Colormap_1_in_16, - main_Colormap_1_in_17, - main_Colormap_1_in_18, - main_Colormap_1_in_19 - ) [instance: 1, cache: 1]; - // - // node String[7]: x = 9, y = 180, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "white" - // - // - // node ColorBar[1]: x = 65, y = 176, inputs = 16, label = ColorBar - // input[2]: defaulting = 0, visible = 1, type = 8, value = [0.05 0.15] - // input[3]: defaulting = 0, visible = 1, type = 8, value = [200 15] - // input[4]: defaulting = 0, visible = 1, type = 3, value = 0 - // input[9]: defaulting = 1, visible = 1, type = 16777248, value = {"white"} - // -main_ColorBar_1_out_1 = - ColorBar( - main_Colormap_1_out_1, - main_ColorBar_1_in_2, - main_ColorBar_1_in_3, - main_ColorBar_1_in_4, - main_ColorBar_1_in_5, - main_ColorBar_1_in_6, - main_ColorBar_1_in_7, - main_ColorBar_1_in_8, - main_String_7_out_1, - main_ColorBar_1_in_10, - main_ColorBar_1_in_11, - main_ColorBar_1_in_12, - main_ColorBar_1_in_13, - main_ColorBar_1_in_14, - main_ColorBar_1_in_15, - main_ColorBar_1_in_16 - ) [instance: 1, cache: 1]; - // - // node Color[5]: x = 275, y = 280, inputs = 5, label = Color - // input[2]: defaulting = 1, visible = 1, type = 32, value = "black" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_5_out_1 = - Color( - main_Import_1_out_1, - main_Colormap_1_out_1, - main_Color_5_in_3, - main_Color_5_in_4, - main_Color_5_in_5 - ) [instance: 5, cache: 1]; - // - // node Collect[3]: x = 245, y = 353, inputs = 2, label = Collect - // -main_Collect_3_out_1 = - Collect( - main_ColorBar_1_out_1, - main_Color_5_out_1 - ) [instance: 3, cache: 1]; - // - // node Switch[1]: x = 309, y = 436, inputs = 2, label = Switch - // -main_Switch_1_out_1 = - Switch( - main_Integer_1_out_1, - main_Collect_3_out_1 - ) [instance: 1, cache: 1]; - // - // node Integer[7]: x = 71, y = 1096, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_7" - // 
input[3]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 0 - // - // - // node FileSelector[4]: x = 61, y = 768, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "/Users/todd/Desktop/svn/cvt/unstructured/run/40962.A.Dgrid/dx/topography.dx" - // output[2]: visible = 1, type = 32, value = "topography.dx" - // - // - // node String[4]: x = 180, y = 771, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "tpg" - // - // - // node Import[4]: x = 114, y = 844, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_4_out_1 = - Import( - main_FileSelector_4_out_1, - main_String_4_out_1, - main_Import_4_in_3, - main_Import_4_in_4, - main_Import_4_in_5, - main_Import_4_in_6 - ) [instance: 4, cache: 1]; - // - // node ShowConnections[3]: x = 97, y = 940, inputs = 1, label = ShowConnections - // -main_ShowConnections_3_out_1 = - ShowConnections( - main_Import_4_out_1 - ) [instance: 3, cache: 1]; - // - // node Color[8]: x = 134, y = 1024, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "black" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_8_out_1 = - Color( - main_ShowConnections_3_out_1, - main_Color_8_in_2, - main_Color_8_in_3, - main_Color_8_in_4, - main_Color_8_in_5 - ) [instance: 8, cache: 1]; - // - // node Tube[3]: x = 248, y = 1080, inputs = 4, label = Tube - // input[2]: defaulting = 0, visible = 1, type = 5, value = 0.01 - // -main_Tube_3_out_1 = - Tube( - main_Color_8_out_1, - main_Tube_3_in_2, - main_Tube_3_in_3, - main_Tube_3_in_4 - ) [instance: 3, cache: 1]; - // - // node Switch[9]: x = 170, y = 1142, inputs = 2, label = Switch - // -main_Switch_9_out_1 = - Switch( - main_Integer_7_out_1, - main_Tube_3_out_1 - ) [instance: 9, cache: 1]; - // - // node Integer[6]: x = 416, y = 363, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_6" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node Color[7]: x = 511, y = 335, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "black" - // -main_Color_7_out_1 = - Color( - main_AutoGlyph_2_out_1, - main_Color_7_in_2, - main_Color_7_in_3, - main_Color_7_in_4, - main_Color_7_in_5 - ) [instance: 7, cache: 1]; - // - // node Switch[8]: x = 493, y = 406, inputs = 2, label = Switch - // -main_Switch_8_out_1 = - Switch( - main_Integer_6_out_1, - main_Color_7_out_1 - ) [instance: 8, cache: 1]; - // - // node Integer[10]: x = 699, y = 470, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_10" - // 
input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node FileSelector[6]: x = 704, y = 67, inputs = 0, label = FileSelector - // output[1]: visible = 1, type = 32, value = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/triangle.dx" - // output[2]: visible = 1, type = 32, value = "triangle.dx" - // - // - // node String[6]: x = 824, y = 70, inputs = 0, label = String - // output[1]: visible = 1, type = 32, value = "areac" - // - // - // node Import[6]: x = 758, y = 143, inputs = 6, label = Import - // input[1]: defaulting = 1, visible = 1, type = 32, value = "/disk5/gdc/swm/run.02562/dx/hexagon.dx" - // input[2]: defaulting = 1, visible = 1, type = 32, value = "center_area" - // input[3]: defaulting = 0, visible = 1, type = 32, value = "dx" - // -main_Import_6_out_1 = - Import( - main_FileSelector_6_out_1, - main_String_6_out_1, - main_Import_6_in_3, - main_Import_6_in_4, - main_Import_6_in_5, - main_Import_6_in_6 - ) [instance: 6, cache: 1]; - // - // node ShowConnections[4]: x = 703, y = 311, inputs = 1, label = ShowConnections - // -main_ShowConnections_4_out_1 = - ShowConnections( - main_Import_6_out_1 - ) [instance: 4, cache: 1]; - // - // node Color[11]: x = 740, y = 395, inputs = 5, label = Color - // input[2]: defaulting = 0, visible = 1, type = 32, value = "yellow" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_11_out_1 = - Color( - main_ShowConnections_4_out_1, - main_Color_11_in_2, - main_Color_11_in_3, - main_Color_11_in_4, - main_Color_11_in_5 - ) [instance: 11, cache: 1]; - // - // node Switch[12]: x = 776, y = 513, inputs = 2, label = Switch - // -main_Switch_12_out_1 = - Switch( - main_Integer_10_out_1, - main_Color_11_out_1 - ) [instance: 12, cache: 1]; - // - // node Integer[9]: x = 867, y = 498, inputs = 11, label = Integer - // input[1]: defaulting = 0, visible = 0, type = 32, value = "Integer_9" - // input[3]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 1, visible = 0, type = 1, value = -1000000 - // input[6]: defaulting = 1, visible = 0, type = 1, value = 1000000 - // input[7]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 0 - // output[1]: visible = 1, type = 1, value = 1 - // - // - // node Colormap[2]: x = 892, y = 217, inputs = 19, label = Colormap - // input[1]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 0.74683544] [1.0 0.0] } - // input[2]: defaulting = 0, visible = 0, type = 16777224, value = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] } - // input[3]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84699454 1.0] } - // input[4]: defaulting = 0, visible = 0, type = 16777224, value = { [0.84972678 1.0] } - // input[5]: defaulting = 0, visible = 0, type = 32, value = "Colormap_2" - // input[7]: defaulting = 1, visible = 0, type = 5, value = 433012.69 - // input[8]: defaulting = 1, visible = 0, type = 5, value = 433012.69 - // input[9]: defaulting = 1, visible = 0, type = 1, value = 20 - // input[12]: defaulting = 0, visible = 0, type = 16777221, value = { 433012.69 433012.69 } - // input[17]: defaulting = 0, visible = 0, type = 
5, value = 433012.69 - // input[18]: defaulting = 0, visible = 0, type = 5, value = 433012.69 - // window: position = (0.0605,0.4696), size = 0.4297x0.4284, screen = 0 - // -main_Colormap_2_out_1[cache: 2], -main_Colormap_2_out_2[cache: 2] = - Colormap( - main_Colormap_2_in_1, - main_Colormap_2_in_2, - main_Colormap_2_in_3, - main_Colormap_2_in_4, - main_Colormap_2_in_5, - main_Import_6_out_1, - main_Colormap_2_in_7, - main_Colormap_2_in_8, - main_Colormap_2_in_9, - main_Colormap_2_in_10, - main_Colormap_2_in_11, - main_Colormap_2_in_12, - main_Colormap_2_in_13, - main_Colormap_2_in_14, - main_Colormap_2_in_15, - main_Colormap_2_in_16, - main_Colormap_2_in_17, - main_Colormap_2_in_18, - main_Colormap_2_in_19 - ) [instance: 2, cache: 1]; - // - // node ColorBar[3]: x = 701, y = 226, inputs = 16, label = ColorBar - // input[2]: defaulting = 0, visible = 1, type = 8, value = [0.05 0.075] - // input[3]: defaulting = 0, visible = 1, type = 8, value = [200 15] - // input[4]: defaulting = 0, visible = 1, type = 3, value = 0 - // input[9]: defaulting = 1, visible = 1, type = 16777248, value = {"white"} - // input[11]: defaulting = 0, visible = 0, type = 5, value = 1.5 - // -main_ColorBar_3_out_1 = - ColorBar( - main_Colormap_2_out_1, - main_ColorBar_3_in_2, - main_ColorBar_3_in_3, - main_ColorBar_3_in_4, - main_ColorBar_3_in_5, - main_ColorBar_3_in_6, - main_ColorBar_3_in_7, - main_ColorBar_3_in_8, - main_String_7_out_1, - main_ColorBar_3_in_10, - main_ColorBar_3_in_11, - main_ColorBar_3_in_12, - main_ColorBar_3_in_13, - main_ColorBar_3_in_14, - main_ColorBar_3_in_15, - main_ColorBar_3_in_16 - ) [instance: 3, cache: 1]; - // - // node Color[10]: x = 913, y = 330, inputs = 5, label = Color - // input[2]: defaulting = 1, visible = 1, type = 32, value = "black" - // input[3]: defaulting = 0, visible = 1, type = 5, value = 1.0 - // -main_Color_10_out_1 = - Color( - main_Import_6_out_1, - main_Colormap_2_out_1, - main_Color_10_in_3, - main_Color_10_in_4, - main_Color_10_in_5 - ) [instance: 10, cache: 1]; - // - // node Collect[4]: x = 883, y = 403, inputs = 2, label = Collect - // -main_Collect_4_out_1 = - Collect( - main_ColorBar_3_out_1, - main_Color_10_out_1 - ) [instance: 4, cache: 1]; - // - // node Switch[11]: x = 947, y = 486, inputs = 2, label = Switch - // -main_Switch_11_out_1 = - Switch( - main_Integer_9_out_1, - main_Collect_4_out_1 - ) [instance: 11, cache: 1]; - // - // node Collect[2]: x = 406, y = 576, inputs = 7, label = Collect - // -main_Collect_2_out_1 = - Collect( - main_Switch_7_out_1, - main_Switch_1_out_1, - main_Switch_9_out_1, - main_Switch_8_out_1, - main_Collect_2_in_5, - main_Switch_12_out_1, - main_Switch_11_out_1 - ) [instance: 2, cache: 1]; - // - // node ColorBar[2]: x = 354, y = 928, inputs = 16, label = ColorBar - // input[2]: defaulting = 0, visible = 1, type = 8, value = [0.05 0.225] - // input[3]: defaulting = 0, visible = 1, type = 8, value = [200 15] - // input[4]: defaulting = 0, visible = 1, type = 3, value = 0 - // input[9]: defaulting = 0, visible = 0, type = 16777248, value = {"white"} - // -main_ColorBar_2_out_1 = - ColorBar( - main_ColorBar_2_in_1, - main_ColorBar_2_in_2, - main_ColorBar_2_in_3, - main_ColorBar_2_in_4, - main_ColorBar_2_in_5, - main_ColorBar_2_in_6, - main_ColorBar_2_in_7, - main_ColorBar_2_in_8, - main_ColorBar_2_in_9, - main_ColorBar_2_in_10, - main_ColorBar_2_in_11, - main_ColorBar_2_in_12, - main_ColorBar_2_in_13, - main_ColorBar_2_in_14, - main_ColorBar_2_in_15, - main_ColorBar_2_in_16 - ) [instance: 2, cache: 1]; - // - // node 
Image[2]: x = 453, y = 651, inputs = 49, label = Image - // input[1]: defaulting = 0, visible = 0, type = 67108863, value = "Image_2" - // input[4]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[5]: defaulting = 0, visible = 0, type = 8, value = [1855.82 170889 0] - // input[6]: defaulting = 0, visible = 0, type = 8, value = [1855.82 170889 524001] - // input[7]: defaulting = 0, visible = 0, type = 5, value = 8190.35 - // input[8]: defaulting = 0, visible = 0, type = 1, value = 1440 - // input[9]: defaulting = 0, visible = 0, type = 5, value = 0.955 - // input[10]: defaulting = 0, visible = 0, type = 8, value = [0 1 0] - // input[11]: defaulting = 1, visible = 0, type = 5, value = 0.895538 - // input[12]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[14]: defaulting = 0, visible = 0, type = 1, value = 1 - // input[15]: defaulting = 1, visible = 0, type = 32, value = "none" - // input[16]: defaulting = 1, visible = 0, type = 32, value = "none" - // input[17]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[18]: defaulting = 1, visible = 0, type = 1, value = 1 - // input[19]: defaulting = 0, visible = 0, type = 1, value = 0 - // input[22]: defaulting = 0, visible = 0, type = 32, value = "black" - // input[25]: defaulting = 0, visible = 0, type = 32, value = "/Users/todd/Desktop/ke.tiff" - // input[26]: defaulting = 0, visible = 0, type = 32, value = "tiff gamma=1" - // input[27]: defaulting = 0, visible = 0, type = 1, value = 750 - // input[28]: defaulting = 1, visible = 0, type = 5, value = 1.0 - // input[29]: defaulting = 0, visible = 0, type = 3, value = 0 - // input[30]: defaulting = 0, visible = 0, type = 16777248, value = {"x axis", "y axis", ""} - // input[31]: defaulting = 0, visible = 0, type = 16777217, value = { -15 -15 15 } - // input[34]: defaulting = 0, visible = 0, type = 3, value = 1 - // input[37]: defaulting = 0, visible = 0, type = 16777248, value = {"grey30", "grey5", "yellow", "white"} - // input[38]: defaulting = 0, visible = 0, type = 16777248, value = {"background", "grid", "ticks", "labels"} - // input[39]: defaulting = 0, visible = 0, type = 5, value = 0.5 - // input[41]: defaulting = 0, visible = 0, type = 32, value = "none" - // depth: value = 24 - // window: position = (0.2492,0.0760), size = 0.5680x0.8980, screen = 0 - // internal caching: 1 - // -main_Image_2_out_1, -main_Image_2_out_2, -main_Image_2_out_3 = - Image( - main_Image_2_in_1, - main_Collect_2_out_1, - main_Image_2_in_3, - main_Image_2_in_4, - main_Image_2_in_5, - main_Image_2_in_6, - main_Image_2_in_7, - main_Image_2_in_8, - main_Image_2_in_9, - main_Image_2_in_10, - main_Image_2_in_11, - main_Image_2_in_12, - main_Image_2_in_13, - main_Image_2_in_14, - main_Image_2_in_15, - main_Image_2_in_16, - main_Image_2_in_17, - main_Image_2_in_18, - main_Image_2_in_19, - main_Image_2_in_20, - main_Image_2_in_21, - main_Image_2_in_22, - main_Image_2_in_23, - main_Image_2_in_24, - main_Image_2_in_25, - main_Image_2_in_26, - main_Image_2_in_27, - main_Image_2_in_28, - main_Image_2_in_29, - main_Image_2_in_30, - main_Image_2_in_31, - main_Image_2_in_32, - main_Image_2_in_33, - main_Image_2_in_34, - main_Image_2_in_35, - main_Image_2_in_36, - main_Image_2_in_37, - main_Image_2_in_38, - main_Image_2_in_39, - main_Image_2_in_40, - main_Image_2_in_41, - main_Image_2_in_42, - main_Image_2_in_43, - main_Image_2_in_44, - main_Image_2_in_45, - main_Image_2_in_46, - main_Image_2_in_47, - main_Image_2_in_48, - main_Image_2_in_49 - ) [instance: 2, cache: 1]; - // - // node Tube[2]: x 
= 11, y = 345, inputs = 4, label = Tube - // input[2]: defaulting = 1, visible = 1, type = 5, value = 0.0025 - // input[3]: defaulting = 0, visible = 0, type = 1, value = 16 - // -main_Tube_2_out_1 = - Tube( - main_Tube_2_in_1, - main_Tube_2_in_2, - main_Tube_2_in_3, - main_Tube_2_in_4 - ) [instance: 2, cache: 1]; -// network: end of macro body -CacheScene(main_Image_2_in_1, main_Image_2_out_1, main_Image_2_out_2); -} -main_FileSelector_2_out_1 = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/edge.dx"; -main_String_2_out_1 = "normal"; -main_Import_2_in_3 = "dx"; -main_Import_2_in_4 = NULL; -main_Import_2_in_5 = NULL; -main_Import_2_in_6 = NULL; -main_Import_2_out_1 = NULL; -main_AutoGlyph_2_in_2 = "standard"; -main_AutoGlyph_2_in_3 = 1.0; -main_AutoGlyph_2_in_4 = 0.1; -main_AutoGlyph_2_in_5 = NULL; -main_AutoGlyph_2_in_6 = NULL; -main_AutoGlyph_2_in_7 = NULL; -main_AutoGlyph_2_out_1 = NULL; -main_Integer_5_in_1 = "Integer_5"; -main_Integer_5_in_2 = NULL; -main_Integer_5_in_3 = 1 ; -main_Integer_5_in_4 = NULL; -main_Integer_5_in_5 = NULL; -main_Integer_5_in_6 = NULL; -main_Integer_5_in_7 = NULL; -main_Integer_5_in_8 = NULL; -main_Integer_5_in_9 = NULL; -main_Integer_5_in_10 = NULL; -main_Integer_5_in_11 = NULL; -main_Integer_5_out_1 = 1 ; -main_FileSelector_1_out_1 = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/voronoi.dx"; -main_String_1_out_1 = "area"; -main_Import_1_in_3 = "dx"; -main_Import_1_in_4 = NULL; -main_Import_1_in_5 = NULL; -main_Import_1_in_6 = NULL; -main_Import_1_out_1 = NULL; -main_ShowConnections_2_out_1 = NULL; -main_Color_3_in_2 = "yellow"; -main_Color_3_in_3 = 1.0; -main_Color_3_in_4 = NULL; -main_Color_3_in_5 = NULL; -main_Color_3_out_1 = NULL; -main_Switch_7_out_1 = NULL; -main_Integer_1_in_1 = "Integer_1"; -main_Integer_1_in_2 = NULL; -main_Integer_1_in_3 = 0 ; -main_Integer_1_in_4 = NULL; -main_Integer_1_in_5 = NULL; -main_Integer_1_in_6 = NULL; -main_Integer_1_in_7 = NULL; -main_Integer_1_in_8 = NULL; -main_Integer_1_in_9 = NULL; -main_Integer_1_in_10 = NULL; -main_Integer_1_in_11 = NULL; -main_Integer_1_out_1 = 0 ; -main_Colormap_1_in_1 = { [0.0 0.74683544] [1.0 0.0] }; -main_Colormap_1_in_2 = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] }; -main_Colormap_1_in_3 = { [0.84699454 1.0] }; -main_Colormap_1_in_4 = { [0.84972678 1.0] }; -main_Colormap_1_in_5 = "Colormap_1"; -main_Colormap_1_in_7 = NULL; -main_Colormap_1_in_8 = NULL; -main_Colormap_1_in_9 = NULL; -main_Colormap_1_in_10 = NULL; -main_Colormap_1_in_11 = NULL; -main_Colormap_1_in_12 = { 866025.38 866025.38 }; -main_Colormap_1_in_13 = NULL; -main_Colormap_1_in_14 = NULL; -main_Colormap_1_in_15 = NULL; -main_Colormap_1_in_16 = NULL; -main_Colormap_1_in_17 = 866025.38; -main_Colormap_1_in_18 = 866025.38; -main_Colormap_1_in_19 = NULL; -main_Colormap_1_out_1 = NULL; -main_String_7_out_1 = "white"; -main_ColorBar_1_in_2 = [0.05 0.15]; -main_ColorBar_1_in_3 = [200 15]; -main_ColorBar_1_in_4 = 0; -main_ColorBar_1_in_5 = NULL; -main_ColorBar_1_in_6 = NULL; -main_ColorBar_1_in_7 = NULL; -main_ColorBar_1_in_8 = NULL; -main_ColorBar_1_in_10 = NULL; -main_ColorBar_1_in_11 = NULL; -main_ColorBar_1_in_12 = NULL; -main_ColorBar_1_in_13 = NULL; -main_ColorBar_1_in_14 = NULL; -main_ColorBar_1_in_15 = NULL; -main_ColorBar_1_in_16 = NULL; -main_ColorBar_1_out_1 = NULL; -main_Color_5_in_3 = 1.0; -main_Color_5_in_4 = NULL; -main_Color_5_in_5 = NULL; -main_Color_5_out_1 = NULL; -main_Collect_3_out_1 = NULL; -main_Switch_1_out_1 = NULL; -main_Integer_7_in_1 = 
"Integer_7"; -main_Integer_7_in_2 = NULL; -main_Integer_7_in_3 = 0 ; -main_Integer_7_in_4 = NULL; -main_Integer_7_in_5 = NULL; -main_Integer_7_in_6 = NULL; -main_Integer_7_in_7 = NULL; -main_Integer_7_in_8 = NULL; -main_Integer_7_in_9 = NULL; -main_Integer_7_in_10 = NULL; -main_Integer_7_in_11 = NULL; -main_Integer_7_out_1 = 0 ; -main_FileSelector_4_out_1 = "/Users/todd/Desktop/svn/cvt/unstructured/run/40962.A.Dgrid/dx/topography.dx"; -main_String_4_out_1 = "tpg"; -main_Import_4_in_3 = "dx"; -main_Import_4_in_4 = NULL; -main_Import_4_in_5 = NULL; -main_Import_4_in_6 = NULL; -main_Import_4_out_1 = NULL; -main_ShowConnections_3_out_1 = NULL; -main_Color_8_in_2 = "black"; -main_Color_8_in_3 = 1.0; -main_Color_8_in_4 = NULL; -main_Color_8_in_5 = NULL; -main_Color_8_out_1 = NULL; -main_Tube_3_in_2 = 0.01; -main_Tube_3_in_3 = NULL; -main_Tube_3_in_4 = NULL; -main_Tube_3_out_1 = NULL; -main_Switch_9_out_1 = NULL; -main_Integer_6_in_1 = "Integer_6"; -main_Integer_6_in_2 = NULL; -main_Integer_6_in_3 = 1 ; -main_Integer_6_in_4 = NULL; -main_Integer_6_in_5 = NULL; -main_Integer_6_in_6 = NULL; -main_Integer_6_in_7 = NULL; -main_Integer_6_in_8 = NULL; -main_Integer_6_in_9 = NULL; -main_Integer_6_in_10 = NULL; -main_Integer_6_in_11 = NULL; -main_Integer_6_out_1 = 1 ; -main_Color_7_in_2 = "black"; -main_Color_7_in_3 = NULL; -main_Color_7_in_4 = NULL; -main_Color_7_in_5 = NULL; -main_Color_7_out_1 = NULL; -main_Switch_8_out_1 = NULL; -main_Integer_10_in_1 = "Integer_10"; -main_Integer_10_in_2 = NULL; -main_Integer_10_in_3 = 1 ; -main_Integer_10_in_4 = NULL; -main_Integer_10_in_5 = NULL; -main_Integer_10_in_6 = NULL; -main_Integer_10_in_7 = NULL; -main_Integer_10_in_8 = NULL; -main_Integer_10_in_9 = NULL; -main_Integer_10_in_10 = NULL; -main_Integer_10_in_11 = NULL; -main_Integer_10_out_1 = 1 ; -main_FileSelector_6_out_1 = "/Volumes/Simulations/MPAS/svn-mpas-model.cgd.ucar.edu/trunk/swmodel/dx/triangle.dx"; -main_String_6_out_1 = "areac"; -main_Import_6_in_3 = "dx"; -main_Import_6_in_4 = NULL; -main_Import_6_in_5 = NULL; -main_Import_6_in_6 = NULL; -main_Import_6_out_1 = NULL; -main_ShowConnections_4_out_1 = NULL; -main_Color_11_in_2 = "yellow"; -main_Color_11_in_3 = 1.0; -main_Color_11_in_4 = NULL; -main_Color_11_in_5 = NULL; -main_Color_11_out_1 = NULL; -main_Switch_12_out_1 = NULL; -main_Integer_9_in_1 = "Integer_9"; -main_Integer_9_in_2 = NULL; -main_Integer_9_in_3 = 1 ; -main_Integer_9_in_4 = NULL; -main_Integer_9_in_5 = NULL; -main_Integer_9_in_6 = NULL; -main_Integer_9_in_7 = NULL; -main_Integer_9_in_8 = NULL; -main_Integer_9_in_9 = NULL; -main_Integer_9_in_10 = NULL; -main_Integer_9_in_11 = NULL; -main_Integer_9_out_1 = 1 ; -main_Colormap_2_in_1 = { [0.0 0.74683544] [1.0 0.0] }; -main_Colormap_2_in_2 = { [0.0 1.0] [0.15300546 1.0] [0.81420765 1.0] [1.0 1.0] }; -main_Colormap_2_in_3 = { [0.84699454 1.0] }; -main_Colormap_2_in_4 = { [0.84972678 1.0] }; -main_Colormap_2_in_5 = "Colormap_2"; -main_Colormap_2_in_7 = NULL; -main_Colormap_2_in_8 = NULL; -main_Colormap_2_in_9 = NULL; -main_Colormap_2_in_10 = NULL; -main_Colormap_2_in_11 = NULL; -main_Colormap_2_in_12 = { 433012.69 433012.69 }; -main_Colormap_2_in_13 = NULL; -main_Colormap_2_in_14 = NULL; -main_Colormap_2_in_15 = NULL; -main_Colormap_2_in_16 = NULL; -main_Colormap_2_in_17 = 433012.69; -main_Colormap_2_in_18 = 433012.69; -main_Colormap_2_in_19 = NULL; -main_Colormap_2_out_1 = NULL; -main_ColorBar_3_in_2 = [0.05 0.075]; -main_ColorBar_3_in_3 = [200 15]; -main_ColorBar_3_in_4 = 0; -main_ColorBar_3_in_5 = NULL; -main_ColorBar_3_in_6 = NULL; 
-main_ColorBar_3_in_7 = NULL; -main_ColorBar_3_in_8 = NULL; -main_ColorBar_3_in_10 = NULL; -main_ColorBar_3_in_11 = 1.5; -main_ColorBar_3_in_12 = NULL; -main_ColorBar_3_in_13 = NULL; -main_ColorBar_3_in_14 = NULL; -main_ColorBar_3_in_15 = NULL; -main_ColorBar_3_in_16 = NULL; -main_ColorBar_3_out_1 = NULL; -main_Color_10_in_3 = 1.0; -main_Color_10_in_4 = NULL; -main_Color_10_in_5 = NULL; -main_Color_10_out_1 = NULL; -main_Collect_4_out_1 = NULL; -main_Switch_11_out_1 = NULL; -main_Collect_2_in_5 = NULL; -main_Collect_2_out_1 = NULL; -main_ColorBar_2_in_1 = NULL; -main_ColorBar_2_in_2 = [0.05 0.225]; -main_ColorBar_2_in_3 = [200 15]; -main_ColorBar_2_in_4 = 0; -main_ColorBar_2_in_5 = NULL; -main_ColorBar_2_in_6 = NULL; -main_ColorBar_2_in_7 = NULL; -main_ColorBar_2_in_8 = NULL; -main_ColorBar_2_in_9 = {"white"}; -main_ColorBar_2_in_10 = NULL; -main_ColorBar_2_in_11 = NULL; -main_ColorBar_2_in_12 = NULL; -main_ColorBar_2_in_13 = NULL; -main_ColorBar_2_in_14 = NULL; -main_ColorBar_2_in_15 = NULL; -main_ColorBar_2_in_16 = NULL; -macro Image( - id, - object, - where, - useVector, - to, - from, - width, - resolution, - aspect, - up, - viewAngle, - perspective, - options, - buttonState = 1, - buttonUpApprox = "none", - buttonDownApprox = "none", - buttonUpDensity = 1, - buttonDownDensity = 1, - renderMode = 0, - defaultCamera, - reset, - backgroundColor, - throttle, - RECenable = 0, - RECfile, - RECformat, - RECresolution, - RECaspect, - AAenable = 0, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - interactionMode, - title, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels, - webOptions) -> ( - object, - camera, - where) -{ - ImageMessage( - id, - backgroundColor, - throttle, - RECenable, - RECfile, - RECformat, - RECresolution, - RECaspect, - AAenable, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels, - interactionMode, - title, - renderMode, - buttonUpApprox, - buttonDownApprox, - buttonUpDensity, - buttonDownDensity) [instance: 1, cache: 1]; - autoCamera = - AutoCamera( - object, - "front", - object, - resolution, - aspect, - [0,1,0], - perspective, - viewAngle, - backgroundColor) [instance: 1, cache: 1]; - realCamera = - Camera( - to, - from, - width, - resolution, - aspect, - up, - perspective, - viewAngle, - backgroundColor) [instance: 1, cache: 1]; - coloredDefaultCamera = - UpdateCamera(defaultCamera, - background=backgroundColor) [instance: 1, cache: 1]; - nullDefaultCamera = - Inquire(defaultCamera, - "is null + 1") [instance: 1, cache: 1]; - resetCamera = - Switch( - nullDefaultCamera, - coloredDefaultCamera, - autoCamera) [instance: 1, cache: 1]; - resetNull = - Inquire( - reset, - "is null + 1") [instance: 2, cache: 1]; - reset = - Switch( - resetNull, - reset, - 0) [instance: 2, cache: 1]; - whichCamera = - Compute( - "($0 != 0 || $1 == 0) ? 
1 : 2", - reset, - useVector) [instance: 1, cache: 1]; - camera = Switch( - whichCamera, - resetCamera, - realCamera) [instance: 3, cache: 1]; - AAobject = - AutoAxes( - object, - camera, - AAlabels, - AAticks, - AAcorners, - AAframe, - AAadjust, - AAcursor, - AAgrid, - AAcolors, - AAannotation, - AAlabelscale, - AAfont, - AAxTickLocs, - AAyTickLocs, - AAzTickLocs, - AAxTickLabels, - AAyTickLabels, - AAzTickLabels) [instance: 1, cache: 1]; - switchAAenable = Compute("$0+1", - AAenable) [instance: 2, cache: 1]; - object = Switch( - switchAAenable, - object, - AAobject) [instance:4, cache: 1]; - SWapproximation_options = - Switch( - buttonState, - buttonUpApprox, - buttonDownApprox) [instance: 5, cache: 1]; - SWdensity_options = - Switch( - buttonState, - buttonUpDensity, - buttonDownDensity) [instance: 6, cache: 1]; - HWapproximation_options = - Format( - "%s,%s", - buttonDownApprox, - buttonUpApprox) [instance: 1, cache: 1]; - HWdensity_options = - Format( - "%d,%d", - buttonDownDensity, - buttonUpDensity) [instance: 2, cache: 1]; - switchRenderMode = Compute( - "$0+1", - renderMode) [instance: 3, cache: 1]; - approximation_options = Switch( - switchRenderMode, - SWapproximation_options, - HWapproximation_options) [instance: 7, cache: 1]; - density_options = Switch( - switchRenderMode, - SWdensity_options, - HWdensity_options) [instance: 8, cache: 1]; - renderModeString = Switch( - switchRenderMode, - "software", - "hardware")[instance: 9, cache: 1]; - object_tag = Inquire( - object, - "object tag")[instance: 3, cache: 1]; - annoted_object = - Options( - object, - "send boxes", - 0, - "cache", - 1, - "object tag", - object_tag, - "ddcamera", - whichCamera, - "rendering approximation", - approximation_options, - "render every", - density_options, - "button state", - buttonState, - "rendering mode", - renderModeString) [instance: 1, cache: 1]; - RECresNull = - Inquire( - RECresolution, - "is null + 1") [instance: 4, cache: 1]; - ImageResolution = - Inquire( - camera, - "camera resolution") [instance: 5, cache: 1]; - RECresolution = - Switch( - RECresNull, - RECresolution, - ImageResolution) [instance: 10, cache: 1]; - RECaspectNull = - Inquire( - RECaspect, - "is null + 1") [instance: 6, cache: 1]; - ImageAspect = - Inquire( - camera, - "camera aspect") [instance: 7, cache: 1]; - RECaspect = - Switch( - RECaspectNull, - RECaspect, - ImageAspect) [instance: 11, cache: 1]; - switchRECenable = Compute( - "$0 == 0 ? 1 : (($2 == $3) && ($4 == $5)) ? ($1 == 1 ? 
2 : 3) : 4", - RECenable, - switchRenderMode, - RECresolution, - ImageResolution, - RECaspect, - ImageAspect) [instance: 4, cache: 1]; - NoRECobject, RECNoRerenderObject, RECNoRerHW, RECRerenderObject = Route(switchRECenable, annoted_object); - Display( - NoRECobject, - camera, - where, - throttle) [instance: 1, cache: 1]; - image = - Render( - RECNoRerenderObject, - camera) [instance: 1, cache: 1]; - Display( - image, - NULL, - where, - throttle) [instance: 2, cache: 1]; - WriteImage( - image, - RECfile, - RECformat) [instance: 1, cache: 1]; - rec_where = Display( - RECNoRerHW, - camera, - where, - throttle) [instance: 1, cache: 0]; - rec_image = ReadImageWindow( - rec_where) [instance: 1, cache: 1]; - WriteImage( - rec_image, - RECfile, - RECformat) [instance: 1, cache: 1]; - RECupdateCamera = - UpdateCamera( - camera, - resolution=RECresolution, - aspect=RECaspect) [instance: 2, cache: 1]; - Display( - RECRerenderObject, - camera, - where, - throttle) [instance: 1, cache: 1]; - RECRerenderObject = - ScaleScreen( - RECRerenderObject, - NULL, - RECresolution, - camera) [instance: 1, cache: 1]; - image = - Render( - RECRerenderObject, - RECupdateCamera) [instance: 2, cache: 1]; - WriteImage( - image, - RECfile, - RECformat) [instance: 2, cache: 1]; -} -main_Image_2_in_1 = "Image_2"; -main_Image_2_in_3 = "X24,,"; -main_Image_2_in_4 = 1; -main_Image_2_in_5 = [1855.82 170889 0]; -main_Image_2_in_6 = [1855.82 170889 524001]; -main_Image_2_in_7 = 8190.35; -main_Image_2_in_8 = 1440; -main_Image_2_in_9 = 0.955; -main_Image_2_in_10 = [0 1 0]; -main_Image_2_in_11 = NULL; -main_Image_2_in_12 = 0; -main_Image_2_in_13 = NULL; -main_Image_2_in_14 = 1; -main_Image_2_in_15 = NULL; -main_Image_2_in_16 = NULL; -main_Image_2_in_17 = NULL; -main_Image_2_in_18 = NULL; -main_Image_2_in_19 = 0; -main_Image_2_in_20 = NULL; -main_Image_2_in_21 = NULL; -main_Image_2_in_22 = "black"; -main_Image_2_in_23 = NULL; -main_Image_2_in_25 = "/Users/todd/Desktop/ke.tiff"; -main_Image_2_in_26 = "tiff gamma=1"; -main_Image_2_in_27 = 750; -main_Image_2_in_28 = NULL; -main_Image_2_in_29 = 0; -main_Image_2_in_30 = {"x axis", "y axis", ""}; -main_Image_2_in_31 = { -15 -15 15 }; -main_Image_2_in_32 = NULL; -main_Image_2_in_33 = NULL; -main_Image_2_in_34 = 1; -main_Image_2_in_35 = NULL; -main_Image_2_in_36 = NULL; -main_Image_2_in_37 = {"grey30", "grey5", "yellow", "white"}; -main_Image_2_in_38 = {"background", "grid", "ticks", "labels"}; -main_Image_2_in_39 = 0.5; -main_Image_2_in_40 = NULL; -main_Image_2_in_41 = "none"; -main_Image_2_in_42 = NULL; -main_Image_2_in_43 = NULL; -main_Image_2_in_44 = NULL; -main_Image_2_in_45 = NULL; -main_Image_2_in_46 = NULL; -main_Image_2_in_47 = NULL; -main_Image_2_in_48 = NULL; -main_Image_2_in_49 = NULL; -main_Tube_2_in_1 = NULL; -main_Tube_2_in_2 = NULL; -main_Tube_2_in_3 = 16; -main_Tube_2_in_4 = NULL; -Executive("product version 4 4 4"); -$sync -main(); diff --git a/visualization/dx/voronoi.dx b/visualization/dx/voronoi.dx deleted file mode 100644 index 38b492df9..000000000 --- a/visualization/dx/voronoi.dx +++ /dev/null @@ -1,236 +0,0 @@ -object "positions list" class array type float rank 1 shape 3 items 240000 -ascii data file vor.position.data - -object "edge list" class array type int rank 0 items 240000 -ascii data file vor.edge.data -attribute "ref" string "positions" - -object "loops list" class array type int rank 0 items 40000 -ascii data file vor.loop.data -attribute "ref" string "edges" - -object "face list" class array type int rank 0 items 40000 -ascii data file vor.face.data 
-attribute "ref" string "loops" - -object 0 class array type float rank 0 items 40000 -data file vor.index.data -attribute "dep" string "faces" - -object 1 class array type float rank 0 items 40000 -data file vor.block.data -attribute "dep" string "faces" - -object 2 class array type float rank 0 items 40000 -data file vor.area.data -attribute "dep" string "faces" - -object 3 class array type float rank 0 items 40000 -data file ./output/div.data -attribute "dep" string "faces" - -object 4 class array type float rank 0 items 40000 -data file ./output/vor.data -attribute "dep" string "faces" - -object 5 class array type float rank 0 items 40000 -data file scalar.data -attribute "dep" string "faces" - -object 6 class array type float rank 0 items 40000 -data file div_analy.data -attribute "dep" string "faces" - -object 7 class array type float rank 0 items 40000 -data file curl_analy.data -attribute "dep" string "faces" - -object 8 class array type float rank 0 items 40000 -data file gradmag_analy.data -attribute "dep" string "faces" - -object 9 class array type float rank 0 items 40000 -data file gradang_analy.data -attribute "dep" string "faces" - -object 10 class array type float rank 0 items 40000 -data file gradmag.data -attribute "dep" string "faces" - -object 11 class array type float rank 0 items 40000 -data file ./output/relative.data -attribute "dep" string "faces" - -object 12 class array type float rank 0 items 40000 -data file ./output/height.data -attribute "dep" string "faces" - -object 13 class array type float rank 0 items 40000 -data file ./output/thickness.data -attribute "dep" string "faces" - -object 14 class array type float rank 0 items 40000 -data file ./output/tracer1.data -attribute "dep" string "faces" - -object 15 class array type float rank 0 items 40000 -data file ./output/tracer2.data -attribute "dep" string "faces" - -object 16 class array type float rank 0 items 40000 -data file ./output/vorTR1.data -attribute "dep" string "faces" - -object 17 class array type float rank 0 items 40000 -data file ./output/thicknessdiff.data -attribute "dep" string "faces" - -object 18 class array type float rank 0 items 40000 -data file ./output/keVD.data -attribute "dep" string "faces" - -object 19 class array type float rank 0 items 40000 -data file ./output/keVDdiff.data -attribute "dep" string "faces" - - - -object "index" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 0 - -object "block" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 1 - -object "area" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 2 - -object "div" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 3 - -object "vor" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 4 - -object "scalar" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 5 - -object "div_analy" class field -component "positions" "positions list" -component 
"edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 6 - -object "curl_analy" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 7 - -object "gradmag_analy" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 8 - -object "gradang_analy" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 9 - -object "gradmag" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 10 - -object "relative" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 11 - -object "height" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 12 - -object "thickness" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 13 - -object "tracer1" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 14 - -object "tracer2" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 15 - -object "vorTR1" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 16 - -object "thicknessdiff" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 17 - -object "keVD" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 18 - -object "keVDdiff" class field -component "positions" "positions list" -component "edges" "edge list" -component "loops" "loops list" -component "faces" "face list" -component "data" 19 diff --git a/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh new file mode 100755 index 000000000..a9758592a --- /dev/null +++ b/visualization/e3sm_cpl_field_conversion/extract_mpas_cpl_fields.sh @@ -0,0 +1,191 @@ +#!/bin/bash + +usage() +{ +cat< 0.1*nx*dc); - if(dx > 0);, x(1,j) = x(1,j) - nx*dc;, end; - if(dx < 0);, x(1,j) = x(1,j) + nx*dc;, end; - end; - if(abs(dy) > 0.1*ny*dc*sqrt(3)/2); - if(dy > 0);, x(2,j) = x(2,j) - sqrt(3)*nx*dc/2;, end; - if(dy < 0);, x(2,j) = x(2,j) + sqrt(3)*nx*dc/2;, end; - end; - end; - end; - - for j=1:nEdgesOnCell(i) - dlmwrite('./dx/vor.position.data', x(:,j), 'delimiter', '\t', ... - 'precision', '%18.10e', '-append'); - edge(j) = iedge + j - 1; - end; - dlmwrite('./dx/vor.edge.data', edge(1:nEdgesOnCell(i)), ... 
- 'delimiter', '\t', 'precision', '%10i', '-append') - iloop = iloop + nEdgesOnCell(i); - iedge = iedge + nEdgesOnCell(i); - end; - - end; - -end; - -if (doTri == 1) - - xC_id = netcdf.inqVarID(ncid,'xCell'); - yC_id = netcdf.inqVarID(ncid,'yCell'); - zC_id = netcdf.inqVarID(ncid,'zCell'); - nCellsOnVertex = 3; - cellsOnVertex_id = netcdf.inqVarID(ncid, 'cellsOnVertex'); - areaTriangle_id = netcdf.inqVarID(ncid,'areaTriangle'); - - xC=netcdf.getVar(ncid, xC_id); - yC=netcdf.getVar(ncid, yC_id); - zC=netcdf.getVar(ncid, zC_id); - cellsOnVertex=netcdf.getVar(ncid, cellsOnVertex_id); - areaTriangle = netcdf.getVar(ncid, areaTriangle_id); - - xV_id = netcdf.inqVarID(ncid,'xVertex'); - yV_id = netcdf.inqVarID(ncid,'yVertex'); - zV_id = netcdf.inqVarID(ncid,'zVertex'); - - xV=netcdf.getVar(ncid, xV_id); - yV=netcdf.getVar(ncid, yV_id); - zV=netcdf.getVar(ncid, zV_id); - - work=size(cellsOnVertex); - nVertices = work(:,2) - - if (doWrite == 1) - system('rm -f ./dx/tri.position.data'); - system('rm -f ./dx/tri.edge.data'); - system('rm -f ./dx/tri.loop.data'); - system('rm -f ./dx/tri.face.data'); - system('rm -f ./dx/tri.area.data'); - - iloop=0; - iedge=0; - for i=1:nVertices - dlmwrite('./dx/tri.face.data', i-1, '-append'); - dlmwrite('./dx/tri.area.data', areaTriangle(i), ... - 'precision', '%18.10e', '-append'); - dlmwrite('./dx/tri.loop.data', iloop, ... - 'precision', '%10i', '-append'); - edge(1:3) = iedge; - for j=1:nCellsOnVertex - x(1,j) = xC(cellsOnVertex(j,i)); - x(2,j) = yC(cellsOnVertex(j,i)); - x(3,j) = zC(cellsOnVertex(j,i)); - end; - - if (doPeriodic == 1); - for j=1:nCellsOnVertex; - dx = x(1,j)-xV(i); - dy = x(2,j)-yV(i); - if(abs(dx) > 0.1*nx*dc); - if(dx > 0);, x(1,j) = x(1,j) - nx*dc;, end; - if(dx < 0);, x(1,j) = x(1,j) + nx*dc;, end; - end; - if(abs(dy) > 0.1*ny*dc*sqrt(3)/2); - if(dy > 0);, x(2,j) = x(2,j) - sqrt(3)*nx*dc/2;, end; - if(dy < 0);, x(2,j) = x(2,j) + sqrt(3)*nx*dc/2;, end; - end; - end; - end; - - for j=1:nCellsOnVertex; - dlmwrite('./dx/tri.position.data', x(:,j), 'delimiter', '\t', ... - 'precision', '%18.10e', '-append') - edge(j) = iedge + j - 1; - end; - dlmwrite('./dx/tri.edge.data', edge(1:3), ... - 'delimiter', '\t', 'precision', '%10i', '-append') - iloop = iloop + 3; - iedge = iedge + 3; - end; - - end; - -end; - -if (doVector == 1) - - if (doWrite == 1) - system('rm -f ./dx/vector.position.data'); - system('rm -f ./dx/vector.data'); - end; - - nEdgesOnCell_id = netcdf.inqVarID(ncid,'nEdgesOnCell'); - nEdgesOnCell=netcdf.getVar(ncid, nEdgesOnCell_id); - work=size(nEdgesOnCell(:,1)); - nCells=work(1) - - xC_id = netcdf.inqVarID(ncid,'xCell'); - yC_id = netcdf.inqVarID(ncid,'yCell'); - zC_id = netcdf.inqVarID(ncid,'zCell'); - - xC=netcdf.getVar(ncid, xC_id); - yC=netcdf.getVar(ncid, yC_id); - zC=netcdf.getVar(ncid, zC_id); - - xP = 0.0; - yP = 0.0; - zP = 1.0; - - for i=1:nCells - - a(1) = xC(i); - a(2) = yC(i); - a(3) = zC(i); - - b(1) = xP; - b(2) = yP; - b(3) = zP; - - c(1) = a(2)*b(3) - a(3)*b(2); - c(2) = a(3)*b(1) - a(1)*b(3); - c(3) = a(1)*b(2) - a(2)*b(1); - - - if (doWrite == 1) - - dlmwrite('./dx/vector.position.data', xC(i), ... - 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.position.data', yC(i), ... - 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.position.data', zC(i), ... - 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.data', c(1), ... - 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.data', c(2), ... - 'precision', '%18.10e', '-append') - - dlmwrite('./dx/vector.data', c(3), ... 
- 'precision', '%18.10e', '-append') - - - end; - -end; - -end; - -netcdf.close(ncid) \ No newline at end of file diff --git a/visualization/mean_section/ColdHot.m b/visualization/mean_section/ColdHot.m deleted file mode 100644 index c93e0629f..000000000 --- a/visualization/mean_section/ColdHot.m +++ /dev/null @@ -1,44 +0,0 @@ -function B = ColdHot(m) -% A colormap for blue cold, white zero, Hot positives. - -if nargin < 1, m = 256; end - -n = fix(m/8); - -% Create cold part: -A = [ - 102 0 102; - 0 41 253; - 102 153 255; - 41 255 255; - 255 255 255]/255; -%A = ones(size(A)) - A; - -v = [n-1 n n n]; - -cold = linspacev(A,v); - -% Create hot part: -A = [ - 255 255 255; - 255 255 0; - 255 102 41; - 255 0 0; - 102 41 0]/255; - -v = [n n n n-1]; -myhot = linspacev(A,v); - - -B = [cold; myhot]; - -%B = [B; flipud(hot(fix(m/2)))]; - - -% Original cold part, 8/2/02: -A = [ - 102 0 102; - 41 0 153; - 0 0 204; - 42 102 255; - 255 255 255]/255; \ No newline at end of file diff --git a/visualization/mean_section/example_sections.m b/visualization/mean_section/example_sections.m deleted file mode 100644 index cb7b578c8..000000000 --- a/visualization/mean_section/example_sections.m +++ /dev/null @@ -1,109 +0,0 @@ -% example_sections.m - -% This file simply contains example cross sections with text names. -% -% Mark Petersen, MPAS-Ocean Team, LANL, March 2014 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% see exampleSections.m for more example sections. - -% sectionText a cell array with text describing each section -sectionText = { -'N Atlantic zonal mean',... -'N Atlantic EUC zonal mean',... -'Eq Pacific 140W lon',... - }; - -% coord(nSections,4) endpoints of sections, with one section per row as -% [startLat startLon endLat endLon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -coord = [... - 0 -80 60 0;... % N Atlantic zonal mean - ]; - -coord = [... - 0 -90 60 -60;... % N Atlantic zonal mean - ]; - -coord = [... - -8 -140 10 -140;... % Eq Pac Meridional - ]; - -coord = [... - -8 -170 10 -95;... % Eq Pac Meridional - ]; - -coord = [... - 0 -80 60 0;... % N Atlantic zonal mean - 21 -80 45 -60;... % DWBC N Atl meridional section - 21 283 32 285;... % DWBC N Atl meridional section - ]; - -coord = [... - -35 -80 70 -1;... % N Atlantic zonal mean - ]; - -coord = [... - -35 -97 70 -1;... % N Atlantic zonal mean - ]; -nSections = size(coord,1); - -% number of points to plot for each figure -nLat = 100; -nLon = 100; - -% Direction to take mean: zonal (z) or meridional (m) -meanDirection = 'z'; - -% plotDepth(nSections) depth to which to plot each section, in m -plotDepth = 5000*ones(1,size(coord,1)); - -% For plotting, only four plots are allowed per row. -% Choose sections above for each page. -% page.name name of this group of sections -% sectionID section numbers for each row of this page -page(1).name = 'NA'; -page(1).sectionID = [1:nSections]; - -% coord range may need alteration to match lonVertex: -% If lonVertex is between 0 and 2*pi, ensure the coordinate range is 0 to 360. -%coord(:,2) = mod(coord(:,2),360); -%coord(:,4) = mod(coord(:,4),360); -% If lonVertex is between -pi and pi, ensure the coordinate range is -180 to 180. 
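ColdHot.m, deleted above, builds a diverging colormap by piecewise-linear interpolation between a few RGB anchor colors: a cold (purple/blue) half ending at white and a hot (yellow/red) half starting at white. Assuming the helper linspacev simply interpolates between successive anchor rows with the given per-segment counts, a numpy equivalent might look like:

    import numpy as np

    def cold_hot(m=256):
        """Diverging colormap: purple/blue 'cold' half up to white, then a 'hot' half."""
        n = m // 8
        cold_anchors = np.array([[102, 0, 102], [0, 41, 253], [102, 153, 255],
                                 [41, 255, 255], [255, 255, 255]]) / 255.0
        hot_anchors  = np.array([[255, 255, 255], [255, 255, 0], [255, 102, 41],
                                 [255, 0, 0], [102, 41, 0]]) / 255.0

        def ramp(anchors, counts):
            # linear interpolation between successive anchors, counts[i] steps per segment
            return np.vstack([np.linspace(anchors[i], anchors[i + 1], counts[i],
                                          endpoint=False)
                              for i in range(len(counts))])

        cold = ramp(cold_anchors, [n - 1, n, n, n])
        hot  = ramp(hot_anchors,  [n, n, n, n - 1])
        return np.vstack([cold, hot])   # roughly m rows of RGB values in [0, 1]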
-coord(:,2) = mod(coord(:,2)+180,360)-180; -coord(:,4) = mod(coord(:,4)+180,360)-180; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables to view -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% see exampleSections.m for more example variables - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% var_lims(nVars,3) contour line definition: min, max, interval - -var_name = {... -'avgVelocityZonal',... -'avgVelocityMeridional',... -'ke_avgVelocity'}; - -var_name = {... -'avgVelocityMeridional',... -}; - -var_conv_factor = [100 100 1]; % convert m/s to cm/s for velocities - -%var_lims = [-20 20 2.0; -10 10 1.0; 0 20 2.5]; -var_lims = [-1 1 .1; -10 10 1.0; 0 20 2.5]; -%var_lims = [-5 5 .5; -10 10 1.0; 0 20 2.5]; - -%var_lims = [-110 110 10.0; -10 10 1.0; 0 20 2.5]; - diff --git a/visualization/mean_section/find_cell_weights.m b/visualization/mean_section/find_cell_weights.m deleted file mode 100644 index 595da92e3..000000000 --- a/visualization/mean_section/find_cell_weights.m +++ /dev/null @@ -1,174 +0,0 @@ -function [cellsOnVertexSection, cellWeightsSection, latSection,lonSection, ... - refLayerThickness, refMidDepth, refBottomDepth, maxLevelCellSection, sphere_radius] = find_cell_weights ... - (wd,dir,netcdf_file,sectionText,coord,nLat,nLon) - -% This function reads grid data from an MPAS-Ocean grid or restart -% netCDF file, and finds a path of cells that connect the endpoints -% specified in coord. The path is forced to travel through cells -% that are closest to the line connecting the beginning and end -% cells. -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. 
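find_cell_weights.m, whose header begins above, samples each section on an nLat-by-nLon grid of lat/lon points, locates the grid vertex nearest each sample, and uses the three cells on that vertex to interpolate data onto the section. A minimal Python sketch of the sampling and nearest-vertex search only (the per-cell interpolation weights and maxLevelCell handling of the original are omitted; input names are assumptions modeled on the MPAS variables read above):

    import numpy as np

    # latVertex/lonVertex in radians, cellsOnVertex of shape (3, nVertices) holding
    # 1-based cell indices, coord = [startLat, startLon, endLat, endLon] in degrees.
    def sample_section(coord, latVertex, lonVertex, cellsOnVertex, nLat=100, nLon=100):
        lat = np.deg2rad(np.linspace(coord[0], coord[2], nLat))
        lon = np.deg2rad(np.linspace(coord[1], coord[3], nLon))
        nearest = np.empty((nLat, nLon), dtype=int)
        for i, la in enumerate(lat):
            for j, lo in enumerate(lon):
                # same flat lat/lon distance the MATLAB search uses
                d2 = (lonVertex - lo) ** 2 + (latVertex - la) ** 2
                nearest[i, j] = np.argmin(d2)
        # data live on the three cells around each nearest vertex (indices still 1-based)
        return cellsOnVertex[:, nearest]   # shape (3, nLat, nLon)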
-% sectionText a cell array with text describing each section -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% -%%%%%%%%%% output arguments %%%%%%%%% -% cellsOnVertexSection(vertexDegree,nLat,nLon,nSections) cells neighboring nearest vertex -% cellWeightsSection(vertexDegree,nLat,nLon,nSections) weights for each cell -% latSection(nLat,nSections) lat coordinates of each section -% lonSection(nLon,nSections) lon coordinates of each section -% maxLevelCellSection(nLat,nLon,nSections) min of maxLevelCell of cells surrounding vertex -% refMidDepth(nVertLevels) depth of center of each layer, for plotting -% latCellDeg(nCells) lat arrays for all cells -% lonCellDeg(nCells) lon arrays for all cells - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read data from file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** find_cell_sections, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ] -ncid = netcdf.open(filename,'nc_nowrite'); - -xCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'xCell')); -yCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'yCell')); -zCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'zCell')); -latVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latVertex')); -lonVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonVertex')); -xVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'xVertex')); -yVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'yVertex')); -zVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'zVertex')); -cellsOnVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'cellsOnVertex')); -refLayerThickness = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refLayerThickness')); -refBottomDepth = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refBottomDepth')); -maxLevelCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'maxLevelCell')); -sphere_radius = netcdf.getAtt(ncid,netcdf.getConstant('NC_GLOBAL'),'sphere_radius'); -[dimname,nVertices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertices')); -[dimname,vertexDegree]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'vertexDegree')); -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); -netcdf.close(ncid) - -nSections = size(coord,1); - -% Compute depth of center of each layer, for plotting -refMidDepth(1) = refLayerThickness(1)/2; -for i=2:nVertLevels - refMidDepth(i) = refMidDepth(i-1) + 0.5*(refLayerThickness(i) + refLayerThickness(i-1)); -end -% depth(1)=0; % make top layer plot to surface - -latSection = zeros(nLat,nSections); -lonSection = zeros(nLon,nSections); -maxLevelCellSection = zeros(nLat,nLon,nSections); -nearestVertexSection = zeros(nLat,nLon,nSections); -cellsOnVertexSection = zeros(vertexDegree,nLat,nLon,nSections); -cellWeightsSection = zeros(vertexDegree,nLat,nLon,nSections); -margin=.5; - -for iSection=1:nSections - fprintf('Finding nearest vertices for Section %g \n',iSection) - latSection(:,iSection) = linspace(coord(iSection,1),coord(iSection,3),nLat); - lonSection(:,iSection) = linspace(coord(iSection,2),coord(iSection,4),nLon); - - maxLon = (max(lonSection(:,iSection))+margin)*pi/180; - minLon = (min(lonSection(:,iSection))-margin)*pi/180; - -% maxLat = (max(latSection(:,iSection))+margin)*pi/180; -% minLat = (min(latSection(:,iSection))-margin)*pi/180; - -% vInd = find(latVertex>minLat&latVertexminLon&lonVertexminLat&latVertexminLon&lonVertex ' doc_dir '/' filename ]); - -% if not(strcmp(latex_command,'none')) -% fprintf('*** Compiling latex 
document \n') -% cd(doc_dir); -% unix([latex_command ' ' filename]); -% cd('../..'); -% end - -end % iSim - - diff --git a/visualization/mean_section/sub_plot_cross_sections.m b/visualization/mean_section/sub_plot_cross_sections.m deleted file mode 100644 index 3b5483d64..000000000 --- a/visualization/mean_section/sub_plot_cross_sections.m +++ /dev/null @@ -1,210 +0,0 @@ -function sub_plot_cross_sections(dir,netcdfFile,sectionText,pageName,sectionID,sectionData,refMidDepth,refBottomDepth,... - latSection,lonSection, maxLevelCellSection,coord, plotDepth,... - var_name,var_lims,meanDirection,fid_latex) - -% Plot cross-sections of MPAS fields. -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% sectionText a cell array with text describing each section -% sectionID section numbers for each row of this page -% pageName name of this group of sections -% sectionData(nVertLevels,nPoints,nSections,nVars) -% data in each cross-section for each variable -% refMidDepth(nVertLevels) depth of center of each layer, for plotting -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% plotDepth(nSections) depth to which to plot each section -% var_lims(nVars,3) contour line definition: min, max, interval -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% meanDirection Direction to take mean: zonal (z) or meridional (m) -% fid_latex file ID of latex file - -fprintf(['** sub_plot_cross_sections simulation: ' dir '/' netcdfFile ... - ' plotting page: ' pageName '\n']) - -px = [.28 .78]; -py=linspace(.84,.13,4); % Midpoint position of plots -pw = [.4]; ph=[.18]; % width and height of plots - -nPoints = size(sectionData,2); -nSections = size(sectionData,3); -nVars = size(sectionData,4); - -for iVar=1:nVars - %figure(iVar+1); clf - %set(gcf,'Position',[100+(iVar*100) 600-iVar*100 550 400]) - %temptext2=char(var_name(iVar)); - - for iRow = 1:length(sectionID) - figure(10*iRow+iVar); clf - %set(gcf,'Position',[100+(iRow*100) 1200-iRow*100 550 400]) - iSection = sectionID(iRow); - if meanDirection == 'z' % zonal mean - xtext = 'latitude'; - xaxis = latSection(:,iSection); - elseif meanDirection == 'm' % meridional mean - xtext = 'longitude'; - xaxis = lonSection(:,iSection); - end - - % left column - % ha=subplot('position',[px(1)-pw/2 py(iRow)-ph/2 pw ph]); - % temptext = char(sectionText(iSection)); - % if temptext2(1:6)=='ke_fromAvgVelocity' - % h=surf(xaxis, refMidDepth,log10(sectionData(:,1:nCellsInSection(iSection),iSection,iVar))); - % set(gca,'CLim',[-1 1.2]) - % else - % h=surf(xaxis, refMidDepth,sectionData(:,1:nCellsInSection(iSection),iSection,iVar)); - % end - - % set(h,'EdgeColor','none') - % view(0,-90) - % title([temptext ', cm/s']) - % ylabel('depth, m') - % xlabel(xtext) - % axis tight - % set(gca,'YLim',[0 plotDepth(iSection)]) - % h=colorbar ; - % if temptext2(1:6)=='ke_fromAvgVelocity' -% set(h,'YTick',[-1:1:1.2],'YTickLabel',[0.1 1 10]) - % end - - % right column - -px = [.53]; -py=.53; -pw = [.85]; ph=[.83]; % width and height of plots - ha=subplot('position',[px(1)-pw/2 py-ph/2 pw ph]); - temptext = char(sectionText(iSection)); - hold on -% contour(xaxis, refMidDepth,sectionData(:,1:nCellsInSection(iSection),iSection,iVar), ... 
-% [var_lims(iVar,1):var_lims(iVar,3):var_lims(iVar,2)]); -% set(gca,'CLim',var_lims(iVar,1:2)) - - %%%%%% special commands for DWBC mrp - % imitating colorbar at http://www.agu.org/journals/jc/jc1203/2011JC007586/figures.shtml#fig10 - - %xaxis = xaxis - 360*ones(size(xaxis)); - % xaxis is now in longitude. Convert to Distance (km) - % along 26.5N east of 77W - %xaxis = (xaxis+77)*99; % for DWBC only - %contour_lims = [-25 -20 -15 -10 -5 -2 -1 1 2 5 10 15 20 25]; - %contour_lims = [-20 -15 -10 -5 -2 0 2 5 10 15 20 25 30]; - contour_lims = [var_lims(iVar,1):var_lims(iVar,3):var_lims(iVar,2)]; - [cout,h]=contourf(xaxis, refMidDepth,sectionData(:,:,iSection,iVar), ... - contour_lims); - set(gca,'CLim',[min(contour_lims) max(contour_lims)]) - set(h,'LineColor',[.5 .5 .5]) - cbfrac=0; - - % Text labels on countours - [cout,h]=contour(xaxis, refMidDepth,sectionData(:,:,iSection,iVar),... - contour_lims); - ls=[200]; - clabel(cout,h,'fontsize',10,'color','k','rotation',0,'LabelSpacing',ls); - set(h,'LineColor',[.5 .5 .5]) - - % Black lines - %[cout,h]=contour(xaxis, refMidDepth,sectionData(:,:,iSection,iVar),[-100:50:100]); - %set(h,'LineColor',[0 0 0],'LineWidth',1) - - % stretched colorbar using contour_lims: - cmin=min(contour_lims); - cmax=max(contour_lims); -% cvalue = cmin-.5*dc:dc:cmax+.5*dc; - nc_orig = 256; - nc = length(contour_lims); - cmap_orig = ColdHot(nc_orig); - cmap_orig_short = zeros(nc-1,3); - ind=(.5:1:nc-.5); - for j=1:nc-1 - cmap_orig_short(j,:) = cmap_orig( floor((j-.5)/(nc-1)*nc_orig),:); - end - - cvalue = linspace(cmin,cmax,256); - nc_inc = length(cvalue); - - cmapnew = zeros(nc_inc,3); - for jnew=2:nc_inc - jold = max(min(min(find(contour_lims>=cvalue(jnew))),nc)-1,1); - cmapnew(jnew-1,:) = cmap_orig_short(jold,:); - end - cmapnew(nc_inc,:) = cmap_orig_short(nc-1,:); - - colormap(cmapnew) - %colorbarf_spec(cout,h,'vert',contour_lims); - %xlabel('Distance (km) along 26.5N east of 77W') % for DWBC only - - axis tight - %set(gca,'YLim',[0 plotDepth(iSection)],'XLim',[0 175]) % for DWBC only - %set(gca,'YTick',[0:1000:5000],'XTick',[0:25:175]) - set(gca,'YLim',[0 plotDepth(iRow)]) - %set(gca,'YTick',[0:100:400]) - xlabel(xtext) - % set(gca,'XTick',-1*[80:.5:70]) - %%%%%% special commands for DWBC mrp end - - %%%%%% special commands for EUC mrp end - %if iRow==2 - % set(gca,'XTick',[143 156 165 180 190 205 220 235 250 265]) - % set(gca,'XTickLabel',{'143' '156' '165E' '180' '170W' '155' '140' '125' '110' '95'}) - %end - - %%%%%% special commands for EUC mrp end - - set(gca,'YDir','reverse') - title([temptext ', ' char(var_name(iVar))]) - ylabel('depth, m') - grid on - set(gca,'layer','top'); - h=colorbar; - - % mrp draw bottom based on zero contour - hold on - n = nPoints; - % old way: maxLevelCell=zeros(1,n); - x(2:n) = (xaxis(1:n-1)+xaxis(2:n))/2; - x(1) = xaxis(1) - (xaxis(2)-xaxis(1))/2; - x(n+1) = xaxis(n) + (xaxis(n)-xaxis(n-1))/2; - b = max(refBottomDepth); - for j=1:n - % old way: maxLevelCell(j)=max(min(find(sectionData(:,j,iSection,iVar)==0.0))-1,1); - %depthline(j) = refBottomDepth(maxLevelCellSection(j,iSection)); - %h=patch([x(j) x(j+1) x(j+1) x(j) x(j)],... - % [b b depthline(j) depthline(j) b], [.5 .5 .5]); - %set(h,'LineStyle','none') - end - - % mrp draw bottom based on zero contour end - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... 
- 'PaperPosition',[0.25 0.25 5.5 3.2]) - subplot('position',[0 .95 1 .05]); axis off - title_txt = [regexprep(char(var_name(iVar)),'_','\\_') ', ' regexprep(dir,'_','\\_')]; -% h=text(.55,.4,title_txt); -% set(h,'HorizontalAlignment','center','FontWeight','bold','FontSize',14) -% text(.005,.7,[ date ]); - - unix(['mkdir -p f/' dir ]); - temp=['f/' dir '/' netcdfFile '_' pageName num2str(iRow) '_' ... - char(var_name(iVar))]; - filename = regexprep(temp,'\.','_'); - print('-djpeg',[filename '.jpg']); - print('-depsc2',[filename '.eps']); - unix(['epstopdf ' filename '.eps --outfile=' filename '.pdf']); - fprintf(fid_latex,['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... - filename '.jpg} \n\\end{figure} \n']); - - % print('-depsc2',[filename '.eps']) - - end - - -end - diff --git a/visualization/mean_section/sub_plot_section_locations.m b/visualization/mean_section/sub_plot_section_locations.m deleted file mode 100644 index 6e100eb21..000000000 --- a/visualization/mean_section/sub_plot_section_locations.m +++ /dev/null @@ -1,97 +0,0 @@ -function sub_plot_section_locations(dir,coord, ... - latSection,lonSection,fid_latex) - -% Plot section locations on world map - -% Mark Petersen, MPAS-Ocean Team, LANL, Sep 2012 - -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% fid_latex file ID of latex file - -fprintf(['** sub_plot_cell_sections, on figure 1.\n']) - -nSections = size(coord,1); - -figure(1); clf - - minLon = -180.0; - latTrans = 360.0; - - % plot topo data of the earth. This is just low-rez one deg - % data for visual reference. - load('topo.mat','topo','topomap1'); - if minLon==-180 - topoNew(:,1:180) = topo(:,181:360); - topoNew(:,181:360) = topo(:,1:180); - image([-180 180],[-90 90],topoNew,'CDataMapping', 'scaled'); - else - image([0 360],[-90 90],topo,'CDataMapping', 'scaled'); - end - - colormap(topomap1); - set(gca,'YDir','normal') - - hold on - - % world - axis tight - set(gca,'XTick',30*[-10:12]) - set(gca,'YTick',15*[-20:20]) - - % half world -% axis([-360+latTrans 0+latTrans -80 70]) -% set(gca,'XTick',20*[-10:20]) -% set(gca,'YTick',10*[-20:20]) - - % N Atlantic -% axis([-90+latTrans -5+latTrans -5 70]) -% set(gca,'XTick',[-100:5:360]) -% set(gca,'YTick',[-90:5:90]) - - % Drake passage -% axis([-90+latTrans,-50+latTrans,-75,-50]) -% set(gca,'XTick',[-100:2:360]) - % set(gca,'YTick',[-200:2:200]) - - % Pacific -% axis([130 260 -10 10]) -% set(gca,'XTick',[0:1:300]) -% set(gca,'YTick',[-20:.1:20]) - - hold on - grid on - - for iSection=1:nSections - h=plot(coord(iSection,[2 2 4 4 2 4 2 4]),... - coord(iSection,[1 3 3 1 1 3 3 1]),'y-'); - set(h,'Color','y','LineWidth',1) - h=text(lonSection(1,iSection),latSection(1,iSection), ... - num2str(iSection)); - - set(h,'Color',[1 1 1],'FontWeight','bold') - %h=plot(lonSection(:,iSection),latSection(:,iSection),'y.'); - end - - ylabel('latitude') - xlabel('longitude') - title(['Domain: ' regexprep(dir,'_','\\_') ' Areas for means. ']) - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... 
- 'PaperPosition',[0.25 0.25 6 4]) - - subplot('position',[0 .95 1 .05]); axis off - text(.005,.7,[ date ]); - - unix(['mkdir -p f/' dir ]); - filename=['f/' dir '/' 'mean_location_map' ]; - print('-djpeg',[filename '.jpg']) - - % put printing text in a latex file - fprintf(fid_latex,... - ['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... - filename '.jpg} \n\\end{figure} \n']); diff --git a/visualization/mean_section/triArea.m b/visualization/mean_section/triArea.m deleted file mode 120000 index 6c9e81502..000000000 --- a/visualization/mean_section/triArea.m +++ /dev/null @@ -1 +0,0 @@ -../cross_section/triArea.m \ No newline at end of file diff --git a/visualization/moc/ColdHot.m b/visualization/moc/ColdHot.m deleted file mode 100644 index c93e0629f..000000000 --- a/visualization/moc/ColdHot.m +++ /dev/null @@ -1,44 +0,0 @@ -function B = ColdHot(m) -% A colormap for blue cold, white zero, Hot positives. - -if nargin < 1, m = 256; end - -n = fix(m/8); - -% Create cold part: -A = [ - 102 0 102; - 0 41 253; - 102 153 255; - 41 255 255; - 255 255 255]/255; -%A = ones(size(A)) - A; - -v = [n-1 n n n]; - -cold = linspacev(A,v); - -% Create hot part: -A = [ - 255 255 255; - 255 255 0; - 255 102 41; - 255 0 0; - 102 41 0]/255; - -v = [n n n n-1]; -myhot = linspacev(A,v); - - -B = [cold; myhot]; - -%B = [B; flipud(hot(fix(m/2)))]; - - -% Original cold part, 8/2/02: -A = [ - 102 0 102; - 41 0 153; - 0 0 204; - 42 102 255; - 255 255 255]/255; \ No newline at end of file diff --git a/visualization/moc/compute_moc_from_w.m b/visualization/moc/compute_moc_from_w.m deleted file mode 100644 index 0e58ec640..000000000 --- a/visualization/moc/compute_moc_from_w.m +++ /dev/null @@ -1,48 +0,0 @@ -function [mocTop] = compute_moc_from_w ... - (vertVelocityTop, botDepth, ... - latCell,lonCell, areaCell,transport,mocLat,landMask) - -% Compute moc streamfunction from vertical velocity -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% -%%%%%%%%%% output arguments %%%%%%%%% -% mocTop(nVertLevels,nLat) -% data in each cross-section for each variable - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Load large variables -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** compute moc simulation: \n']) - -nVertLevels = length(botDepth); -nCells = length(areaCell); -nLat = length(mocLat); - -mocTop = zeros(nVertLevels+1,nLat); - -for k=2:nVertLevels+1 - mocTop(k,1) = mocTop(k-1,1) + transport(k-1)*1e6; -end - -for iLat = 2:nLat - ind = find(landMask==1 & latCell>=mocLat(iLat-1) & latCellabs(latChange) % zonal section - if lonChange>0 - fprintf(['Warning: Zonal sections should go from east to west. ' ... - 'For section %g start and end longitudes are %g, %g \n'], ... - j,sectionCoord(j,2),sectionCoord(j,4)) - end - else - if latChange<0 - fprintf(['Warning: Meridional sections should go from south to north. ' ... - 'For section %g start and end latitudes are %g, %g \n'], ... 
- j,sectionCoord(j,1),sectionCoord(j,3)) - end - end - -end - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read edge and edge data from grid file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** find_edge_sections, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ]; -ncid = netcdf.open(filename,'nc_nowrite'); - -latVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latVertex')); -lonVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonVertex')); -verticesOnEdge = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'verticesOnEdge')); -edgesOnVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'edgesOnVertex')); -[dimname,nEdges]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nEdges')); -[dimname,nVertices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertices')); -[dimname,vertexDegree]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'vertexDegree')); -netcdf.close(ncid) - -% Grid variables should be: -% lat varies from -pi/2:pi/2 -% lon varies from 0:2*pi -if (min(lonVertex)<-1e-8) - lonVertex = mod(lonVertex,2*pi); -end -% inserted for lon -180:180 -lonVertex = mod(lonVertex+pi,2*pi)-pi; - -% convert to degrees for plotting: -latVertexDeg = latVertex*180/pi; -lonVertexDeg = lonVertex*180/pi; - -sectionVertexIndex = zeros(maxEdges,nSections); -sectionEdgeIndex = zeros(maxEdges,nSections); -sectionEdgeSign = zeros(maxEdges,nSections); -nEdgesInSection = zeros(1,nSections); - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Find edges that connect beginning and ending points -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -for iSection=1:nSections - latCoord = [sectionCoord(iSection,1) sectionCoord(iSection,3)]/180*pi; - lonCoord = [sectionCoord(iSection,2) sectionCoord(iSection,4)]/180*pi; - - % Find vertex closest to start and end coordinates. - % The seed vertex array simply stores start and end index. - minDist = 1e10*ones(1,2); - seedVertexIndex = zeros(1,2); - for iVertex = 1:nVertices - for i=1:2 - dist = sqrt( ... - (lonCoord(i) - lonVertex(iVertex))^2 ... - + (latCoord(i) - latVertex(iVertex))^2); - if (dist0) - % Find the vertex on the other side of iEdge - if (verticesOnEdge(1,iEdge)==sectionVertexIndex(i,iSection)) - iVertex = verticesOnEdge(2,iEdge); - % Going from vertex 1 to vertex 2. Leave positive. - edgeSign = 1; - else - iVertex = verticesOnEdge(1,iEdge); - % Going from vertex 2 to vertex 1. Make negative. - edgeSign = -1; - end - - % I am using lat/lon Cartesian distance. - % This is distance to the final vertex location. - dist = sqrt( ... - (lonVertex(iVertex) - lonVertex(endVertexIndex))^2 ... - + (latVertex(iVertex) - latVertex(endVertexIndex))^2 ); - -%fprintf('%6i %6i %8.4f %8.4f h1=plot(%g,%g); h2=plot(%g,%g); \n',... -%i,j,dist,distLastVertex,... -% lonVertex(iVertex)*180/pi,latVertex(iVertex)*180/pi,... -% lonVertex(endVertexIndex)*180/pi,latVertex(endVertexIndex)*180/pi) - % check if this vertex is closer to the end vertex than the - % last vertex. If so, it is a candidate, and we can continue. 
- if (dist=70)) = 0.0; -land_mask(find( lat<-35)) = 0.0; - -% mask out eastern boundary -land_mask(find( lat>=-35 & lat<10 & lon180>22)) = 0.0; -land_mask(find( lat>= 10 & lat<49 & lon180> 0)) = 0.0; -land_mask(find( lat>= 49 & lat<66 & lon180>13)) = 0.0; -land_mask(find( lat>= 66 & lat<70 & lon180>30)) = 0.0; - -% mask out western boundary -land_mask(find( lat>=-35 & lat< 9 & lon180<-63)) = 0.0; -land_mask(find( lat>= 9 & lat<14 & lon180<-84)) = 0.0; -land_mask(find( lat>= 14 & lat<17 & lon180<-89)) = 0.0; -land_mask(find( lat>= 17 & lat<50 & lon180<-98)) = 0.0; -land_mask(find( lat>= 50 & lat<70 & lon180<-70)) = 0.0; - - diff --git a/visualization/moc/land_mask_global.m b/visualization/moc/land_mask_global.m deleted file mode 100644 index caf4dd6bf..000000000 --- a/visualization/moc/land_mask_global.m +++ /dev/null @@ -1,23 +0,0 @@ -function [land_mask] = land_mask_global(lat,lon) - -% Given latitude and longitude coordinates, produce a land mask -% land_mask = 1 in specified region -% land_mask = 0 elsewhere -% -% Mark Petersen, MPAS-Ocean Team, LANL, January 2013 -% -%%%%%%%%%% input arguments %%%%%%%%% -% lat(nPoints) latitude in degrees, ranging from 0 to 360 -% or -180 to 180 -% lon(nPoints) longitude in degrees, ranging from -90 to 90 -% -%%%%%%%%%% output arguments %%%%%%%%% -% land_mask(nPoints) - -if size(lat) ~= size(lon) - fprintf('Size of lat and lon must be the same.\n') - return -end - -% for global, include all points -land_mask = ones(size(lat)); diff --git a/visualization/moc/load_large_variables_edge.m b/visualization/moc/load_large_variables_edge.m deleted file mode 100644 index 249302aa8..000000000 --- a/visualization/moc/load_large_variables_edge.m +++ /dev/null @@ -1,78 +0,0 @@ -function [sectionData] = load_large_variables_edge ... - (wd,dir,netcdf_file, var_name, var_conv_factor, ... - sectionEdgeIndex, nEdgesInSection) - -% Load large variables from netcdf file -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% sectionEdgeIndex(maxEdges,nSections) cell index of each section -% nEdgesInSection(nSections) number of cells in each section -% -%%%%%%%%%% output arguments %%%%%%%%% -% sectionData(nVertLevels,max(nEdgesInSection),nSections,nVars) -% data in each cross-section for each variable - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Load large variables -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** load_large_variables simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ]; -ncid = netcdf.open(filename,'nc_nowrite'); - -nAverage = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'nAverage')); - -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); -[dimname,nEdges]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nEdges')); - -% see if nTimeSlices dimension exists. If not, set nTimeSlices to 1. 
-try - [dimname,nTimeSlices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'Time')); -catch - nTimeSlices = 1; -end - -nVars = length(var_name); -nSections = length(nEdgesInSection); - -maxNumberEdges = max(nEdgesInSection); -sectionData = zeros(nVertLevels,maxNumberEdges,nSections,nVars); - -for iVar=1:nVars - temptext = char(var_name(iVar)); - fprintf(['loading: ' temptext '\n']) - -% acc_var = netcdf.getVar(ncid,netcdf.inqVarID(ncid,char(var_name(iVar)))); -% mean_var = zeros(nVertLevels, nEdges); -% for i=1:nTimeSlices -% mean_var = mean_var + nAverage(i)*squeeze(acc_var(:,:,i)); -% end -% mean_var = mean_var/sum(nAverage)*var_conv_factor(iVar); - - avgNormalVelocity = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'avgNormalVelocity')); - - for iSection = 1:nSections - for i=1:nEdgesInSection(iSection) - iEdge = sectionEdgeIndex(i,iSection); - for k=1:nVertLevels -% sectionData(k,i,iSection,iVar) = mean_var(k,iEdge); - sectionData(k,i,iSection,iVar) = avgNormalVelocity(k,iEdge); - end - end - end - -end -netcdf.close(ncid) - -fprintf('\n') - diff --git a/visualization/moc/load_vertical_velocity.m b/visualization/moc/load_vertical_velocity.m deleted file mode 100644 index d8db32714..000000000 --- a/visualization/moc/load_vertical_velocity.m +++ /dev/null @@ -1,45 +0,0 @@ -function [avgVertVelocityTop, refBottomDepth, latCell,lonCell, areaCell, nVertLevels] ... - = load_vertical_velocity ... - (wd,dir,netcdf_file,vert_var_name) - -% load vertical velcoity -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. -% -%%%%%%%%%% output arguments %%%%%%%%% -% refBottomDepth(nVertLevels) depth of center of each layer, for plotting -% vertVelocityTop(nVertLevels,nCells) vertical velocity at cell center, top - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read data from file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** compute u_acc, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ] -ncid = netcdf.open(filename,'nc_nowrite'); - -temp=char(vert_var_name(1)); -if temp(1:7)=='avgEddy' - fprintf('Computing eddy-induced vertical velocity') - avgVertVelocityTopEulerian = netcdf.getVar(ncid,netcdf.inqVarID(ncid,char(vert_var_name(2)))); - avgVertVelocityTopTransport = netcdf.getVar(ncid,netcdf.inqVarID(ncid,char(vert_var_name(3)))); - avgVertVelocityTop = avgVertVelocityTopEulerian - avgVertVelocityTopTransport; -else - avgVertVelocityTop = netcdf.getVar(ncid,netcdf.inqVarID(ncid,char(vert_var_name(1)))); -end - -refBottomDepth = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refBottomDepth')); -latCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latCell'))*180/pi; -lonCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonCell'))*180/pi; -areaCell = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'areaCell')); -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); - -netcdf.close(ncid) - -fprintf(['\n']) diff --git a/visualization/moc/moc.m b/visualization/moc/moc.m deleted file mode 100644 index 76d3438d0..000000000 --- a/visualization/moc/moc.m +++ /dev/null @@ -1,259 +0,0 @@ -% function moc - -% Plot cross-sections of means of MPAS fields. -% -% This is the main function, where the user can specify data files, -% coordinates and text, then call functions to find sections, load -% data, and plot cross-sections. 
-% -% The final product is a set of plots as jpg files, a latex file, -% and a compiled pdf file of the plots, if desired. -% -% Mark Petersen, MPAS-Ocean Team, LANL, January 2013 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify data files -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% all plots are placed in the f directory. Comment out if not needed. -unix('mkdir -p f docs'); - -% The text string [wd '/' sim(i).dir '/' sim(i).netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. - -wd = '/var/tmp/mpeterse/runs/'; - -% These files only need to contain a small number of variables. -% You may need to reduce the file size before copying to a local -% machine using: -% ncks -v avgNormalVelocityReconstructMeridional,avgNormalVelocityReconstructZonal, \ -% nAverage,latVertex,lonVertex,verticesOnEdge,edgesOnVertex,refLayerThickness,\ -% dvEdge,latCell,lonCell,cellsOnCell,nEdgesOnCell \ -% file_in.nc file_out.nc - -dir='m91'; -abc='klmnop'; - -for j=1:length(abc) - sim(j).dir = [dir abc(j)]; - sim(j).netcdf_file = ['output_total_avg.nc']; -end - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% Choose Atlantic or global MOC -%region='Atlant' - region='global' - -% Compute MOC using section -% [startlat startlon endlat endlon] -if region=='Atlant' - sectionText = {'Atlantic MOC'}; - sectionCoord = [-34.5 19.9 -34.5 -55] % '34.5S, South America to Africa -63 to 22 - mocLat = [-34.5:.5:70]; -elseif region=='global' - sectionText = {'Global MOC'}; - mocLat = [-80:.5:85]; -else - fprintf('Incorrect region name') - return -end - -% For plotting, only four plots are allowed per row. -% Choose sections above for each page. -% page.name name of this group of sections -% sectionID section numbers for each row of this page -page(1).name = 'NA'; -page(1).sectionID = [1:1]; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables to view -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% var_lims(nVars,3) contour line definition: min, max, interval - -% Typical variables used for plotting: -% Eulerian velocity from prognostic momentum equation -hor_var_name = {'avgNormalVelocity'};vert_var_name = {'avgVertVelocityTop'};fign=1; -% total transport velocity -%hor_var_name = {'avgNormalTransportVelocity'}; vert_var_name = {'avgVertTransportVelocityTop'};fign=2 -% remaining: eddy-induced transport -%hor_var_name = {'avgNormalGMBolusVelocity'}; vert_var_name = {'avgVertGMBolusVelocityTop'};fign=3 - -var_conv_factor = [1 1 1]; % No conversion here. -if region=='Atlant' - contour_lims = [-10:2:10]; -elseif region=='global' - contour_lims = [-40:4:40]; - %contour_lims = [-20:4:-4 -2 2 4:4:20]; % for Bolus velocity MOS -end - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify latex command -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% This matlab script will invoke a latex compiler in order to -% produce a pdf file. Specify the unix command-line latex -% executable, or 'none' to not compile the latex document. 
- -latex_command = 'latex'; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify actions to be taken -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -if region=='Atlant' - find_edge_sections_flag = true ; - plot_edge_sections_flag = false ; - compute_transport_flag = true ; -elseif region=='global' - find_edge_sections_flag = true ; - plot_edge_sections_flag = true ; - compute_transport_flag = false ; -end -load_vertical_velocity_flag = true ; -compute_moc_flag = true ; -plot_moc_flag = true ; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Begin main code. Normally this does not need to change. -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -nSections = 1; - -for iSim = 1:length(sim) - - fprintf(['**** simulation: ' sim(iSim).dir '\n']) - fid_latex = fopen('temp.tex','w'); - fprintf(fid_latex,['%% file created by plot_mpas_cross_sections, ' date '\n\n']); - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % load vertical velocity - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if load_vertical_velocity_flag - [sim(iSim).avgVertVelocityTop, sim(iSim).botDepth, ... - sim(iSim).latCell,sim(iSim).lonCell, sim(iSim).areaCell,nVertLevels] = ... - load_vertical_velocity(wd,sim(iSim).dir,sim(iSim).netcdf_file,vert_var_name); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Find edges that connect beginning and end points of section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if find_edge_sections_flag - [sim(iSim).sectionEdgeIndex, sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sim(iSim).latSectionVertex,sim(iSim).lonSectionVertex, ... - sim(iSim).latVertexDeg,sim(iSim).lonVertexDeg] ... - = find_edge_sections(wd,sim(iSim).dir,sim(iSim).netcdf_file, ... - sectionText,sectionCoord); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot edge section locations on world map - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if plot_edge_sections_flag - sub_plot_edge_sections(sim(iSim).dir,sectionCoord, ... - sim(iSim).latSectionVertex,sim(iSim).lonSectionVertex, ... - sim(iSim).latVertexDeg,sim(iSim).lonVertexDeg, ... - sim(iSim).sectionEdgeIndex, sim(iSim).nEdgesInSection,... - fid_latex); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Load large variables from netcdf file for section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_transport_flag - [sim(iSim).sectionData] = load_large_variables_edge ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, hor_var_name, var_conv_factor, ... - sim(iSim).sectionEdgeIndex, sim(iSim).nEdgesInSection); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Compute transport through each section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_transport_flag - transport = compute_transport ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, hor_var_name, ... - sim(iSim).sectionEdgeIndex, sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... 
- sim(iSim).sectionData,sectionText,sectionAbbreviation); - else - transport = zeros(nVertLevels,nSections); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Compute MOC - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_moc_flag - if region=='Atlant' - [sim(iSim).landMask] = land_mask_Atlantic(sim(iSim).latCell,sim(iSim).lonCell); - elseif region=='global' - [sim(iSim).landMask] = land_mask_global(sim(iSim).latCell,sim(iSim).lonCell); - end - - [sim(iSim).mocTop] = compute_moc_from_w ... - (sim(iSim).avgVertVelocityTop, sim(iSim).botDepth, ... - sim(iSim).latCell,sim(iSim).lonCell, sim(iSim).areaCell,transport,mocLat,sim(iSim).landMask); - - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot MOC - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - if plot_moc_flag - plot_moc(sim(iSim).dir,sectionText,sim(iSim).mocTop,mocLat, ... - sim(iSim).botDepth,contour_lims,vert_var_name(1),fign) - end - -% doc_dir = ['docs/' regexprep(sim(iSim).dir,'/','_') '_' ... -% sim(iSim).netcdf_file '_dir' ]; -% unix(['mkdir -p ' doc_dir '/f']); -% unix(['mv f/*jpg ' doc_dir '/f']); - -% filename = [ regexprep(sim(iSim).dir,'/','_') '_' sim(iSim).netcdf_file '.tex']; -% unix(['cat mpas_sections.head.tex temp.tex > ' doc_dir '/' filename ]); - -% if not(strcmp(latex_command,'none')) -% fprintf('*** Compiling latex document \n') -% cd(doc_dir); -% unix([latex_command ' ' filename]); -% cd('../..'); -% end - -end % iSim - - diff --git a/visualization/moc/plot_moc.m b/visualization/moc/plot_moc.m deleted file mode 100644 index b8b990356..000000000 --- a/visualization/moc/plot_moc.m +++ /dev/null @@ -1,114 +0,0 @@ -function plot_moc(dir,sectionText,mocTop,mocLat,botDepth, ... - contour_lims,var_name, fign) - -% Plot cross-sections of MPAS fields. -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% sectionText a cell array with text describing each section -% sectionID section numbers for each row of this page -% pageName name of this group of sections -% mocData(nVertLevels,nPoints,nSections,nVars) -% data in each cross-section for each variable -% depth(nVertLevels) depth of center of each layer, for plotting -% latSection(nPoints,nSections) lat coordinates of each section -% lonSection(nPoints,nSections) lon coordinates of each section -% coord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% plotDepth(nSections) depth to which to plot each section -% contour_lims(nVars,3) contour line definition: min, max, interval -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% fid_latex file ID of latex file - -fprintf(['** sub_plot_cross_sections simulation: ' dir ... - '\n']) - -px = [.54]; -py=.53; -pw = [.87]; ph=[.83]; % width and height of plots - -% smooth once, like POP: -[nVertLevels nLat] = size(mocTop); -mocTopSmooth(:,1) = mocTop(:,1); -mocTopSmooth(:,nLat) = mocTop(:,nLat); -for j=2:nLat-1 - mocTopSmooth(:,j) = (mocTop(:,j-1) + mocTop(:,j-1) + mocTop(:,j-1))/3; -end - -figure(fign); clf - - [cout,h]=contourf(mocLat,[0; botDepth],mocTopSmooth,... - contour_lims); - set(gca,'CLim',[min(contour_lims) max(contour_lims)]) - set(h,'LineColor',[.5 .5 .5]) - cbfrac=0; - hold on - - % Text labels on countours - [cout,h]=contour(mocLat,[0; botDepth],mocTopSmooth,... 
- contour_lims); - ls=[200]; - clabel(cout,h,'fontsize',10,'color','k','rotation',0,'LabelSpacing',ls); - set(h,'LineColor',[.5 .5 .5]) - - % Black lines - %[cout,h]=contour(mocLat,[0; botDepth],mocTopSmooth,[-100:100:100]); - %set(h,'LineColor',[0 0 0],'LineWidth',1) - - -%contour(mocLat,[0 botDepth],mocTopSmooth,[-15:2:20]) -set(gca,'YDir','reverse') -%colorbar -grid on -xlabel('latitude') -ylabel('depth') -title([char(sectionText(1)) ', Sv, ' dir ', ' char(var_name(1))],'Interpreter','none'); - - - % stretched colorbar using contour_lims: - cmin=min(contour_lims); - cmax=max(contour_lims); -% cvalue = cmin-.5*dc:dc:cmax+.5*dc; - nc_orig = 256; - nc = length(contour_lims); - cmap_orig = ColdHot(nc_orig); - cmap_orig_short = zeros(nc-1,3); - ind=(.5:1:nc-.5); - for j=1:nc-1 - cmap_orig_short(j,:) = cmap_orig( floor((j-.5)/(nc-1)*nc_orig),:); - end - - cvalue = linspace(cmin,cmax,256); - nc_inc = length(cvalue); - - cmapnew = zeros(nc_inc,3); - for jnew=2:nc_inc - jold = max(min(min(find(contour_lims>=cvalue(jnew))),nc)-1,1); - cmapnew(jnew-1,:) = cmap_orig_short(jold,:); - end - cmapnew(nc_inc,:) = cmap_orig_short(nc-1,:); - - colormap(cmapnew) - -h=colorbar; -set(h,'YTick',contour_lims); - - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... - 'PaperPosition',[0.25 0.25 7 3.2]) -% subplot('position',[0 .95 1 .05]); axis off -% title_txt = [regexprep(char(var_name(iVar)),'_','\\_') ', ' regexprep(dir,'_','\\_')]; -% h=text(.55,.4,title_txt); -% set(h,'HorizontalAlignment','center','FontWeight','bold','FontSize',14) -% text(.005,.7,[ date ]); - - unix(['mkdir -p f/' dir ]); - tempTxt = char(sectionText(1)); - temp=['f/' dir '/' tempTxt(1:6) 'Moc_' char(var_name(1))]; - filename = regexprep(temp,'\.','_'); - print('-djpeg',[filename '.jpg']); - print('-depsc2',[filename '.eps']); - unix(['epstopdf ' filename '.eps --outfile=' filename '.pdf']); diff --git a/visualization/moc/sub_plot_edge_sections.m b/visualization/moc/sub_plot_edge_sections.m deleted file mode 100644 index 7f63357fd..000000000 --- a/visualization/moc/sub_plot_edge_sections.m +++ /dev/null @@ -1,117 +0,0 @@ -function sub_plot_edge_sections(dir,sectionCoord, ... - latSectionVertex,lonSectionVertex, ... - latVertexDeg,lonVertexDeg, ... - sectionEdgeIndex, nEdgesInSection,... - fid_latex) - -% Plot edge section locations on world map - -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 - -%%%%%%%%%% input arguments %%%%%%%%% -% dir text string, name of simulation -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% latVertexDeg(nVertices) lat arrays for all vertices -% lonVertexDeg(nVertices) lon arrays for all vertices -% sectionEdgeIndex(maxEdges,nSections) edge index of each section -% nEdgesInSection(nSections) number of edges in each section -% fid_latex file ID of latex file - -fprintf(['** sub_plot_edge_sections, on figure 1.\n']) - -nSections = size(sectionCoord,1); - -figure(1); clf - -if (min(lonVertexDeg)>-1e-8) - minLon = 0.0; - latTrans = 360; -else - minLon = -180.0; - latTrans = 0.0; -end - - % plot topo data of the earth. This is just low-rez one deg - % data for visual reference. 
- load('topo.mat','topo','topomap1'); - if minLon==-180 - topoNew(:,1:180) = topo(:,181:360); - topoNew(:,181:360) = topo(:,1:180); - image([-180 180],[-90 90],topoNew,'CDataMapping', 'scaled'); - else - image([0 360],[-90 90],topo,'CDataMapping', 'scaled'); - end - - colormap(topomap1); - set(gca,'YDir','normal') - - hold on - - % world - axis tight - set(gca,'XTick',30*[-10:12]) - set(gca,'YTick',15*[-20:20]) - - % half world -% axis([-240+latTrans 0+latTrans -80 70]) -% set(gca,'XTick',20*[-10:20]) -% set(gca,'YTick',10*[-20:20]) - - % N Atlantic -% axis([-90+latTrans -5+latTrans -5 70]) -% set(gca,'XTick',[-100:5:360]) -% set(gca,'YTick',[-90:5:90]) - - % Drake passage -% axis([-90+latTrans,-50+latTrans,-75,-50]) -% set(gca,'XTick',[-100:2:360]) - % set(gca,'YTick',[-200:2:200]) - - % Pacific -% axis([130 260 -10 10]) -% set(gca,'XTick',[0:1:300]) -% set(gca,'YTick',[-20:.1:20]) - - - % plot vertexs. This is just done for debugging. - plot(lonVertexDeg,latVertexDeg,'.y') - - grid on - - for iSection=1:nSections - latCoordDeg = [sectionCoord(iSection,1) sectionCoord(iSection,3)]; - lonCoordDeg = [sectionCoord(iSection,2) sectionCoord(iSection,4)]; - - h=plot([mod(lonCoordDeg,360)],[latCoordDeg],'*-'); - set(h,'Color','y','LineWidth',1) - h=plot([mod(lonCoordDeg(1),360)],[latCoordDeg(1)],'*k'); - - for i=1:nEdgesInSection(iSection) - h = line([lonSectionVertex(i,iSection) lonSectionVertex(i+1,iSection)],... - [latSectionVertex(i,iSection) latSectionVertex(i+1,iSection)]); - set(h,'Color','r','LineWidth',2) - %plot([lonVertexDeg(sectionVertexIndex(i+1,iSection))], ... - % [latVertexDeg(sectionVertexIndex(i+1,iSection))],'sk') - end - end - - ylabel('latitude') - xlabel('longitude') - title(['Domain: ' regexprep(dir,'_','\\_') ' Edges of transport sections. ']) - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... - 'PaperPosition',[0.25 0.25 8 8]) - - subplot('position',[0 .95 1 .05]); axis off - text(.005,.7,[ date ]); - - dir_name1 = regexprep(dir,'\.','_'); - dir_name2 = regexprep(dir_name1,'/','_'); - filename=['f/' dir_name2 '_vertex_map' ]; - print('-djpeg',[filename '.jpg']); - - % put printing text in a latex file - fprintf(fid_latex,... - ['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... 
- filename '.jpg} \n\\end{figure} \n']); diff --git a/visualization/ncl/atm_cells.ncl b/visualization/ncl/atm_cells.ncl deleted file mode 100644 index da8dd652f..000000000 --- a/visualization/ncl/atm_cells.ncl +++ /dev/null @@ -1,145 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" - -begin - - r2d = 57.2957795 ; radians to degrees - - maxedges = 8 - - wks = gsn_open_wks("pdf","atm_cells") - gsn_define_colormap(wks,"BlAqGrYeOrReVi200") - - fname = getenv("FNAME") - f = addfile(fname,"r") - - nEdgesOnCell = f->nEdgesOnCell(:) - verticesOnCell = f->verticesOnCell(:,:) - verticesOnEdge = f->verticesOnEdge(:,:) - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - - res = True - res@gsnPaperOrientation = "portrait" - - res@sfXArray = x - res@sfYArray = y - - res@cnFillOn = True - res@cnFillMode = "RasterFill" - res@cnLinesOn = False - res@cnLineLabelsOn = False - res@cnInfoLabelOn = False - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = "CylindricalEquidistant" -; res@mpProjection = "Orthographic" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 0. - res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - ; - ; The purpose of this section is simply to set up a graphic ('map') - ; that uses the projection specified above, and over which we - ; can draw polygons - ; - h = f->areaCell(:) - sizes = dimsizes(h) - nCells = sizes(0) - xpoly = new((/maxedges/), "double") - ypoly = new((/maxedges/), "double") - res@cnConstFLabelOn = False - res@lbLabelBarOn = False - map = gsn_csm_contour_map(wks,h,res) - - t = stringtointeger(getenv("T")) - - ; - ; Set the field to be plotted here - ; - pres = True - h = f->qv(t,:,0) - minfld = min(h) - maxfld = max(h) - fldrange = maxfld - minfld - do iCell=0,nCells-1 - do i=0,nEdgesOnCell(iCell)-1 - xpoly(i) = lonVertex(verticesOnCell(iCell,i)-1) - ypoly(i) = latVertex(verticesOnCell(iCell,i)-1) - if (i .gt. 0) then - if (abs(xpoly(i) - xpoly(0)) .gt. 180.0) then - if (xpoly(i) .gt. xpoly(0)) then - xpoly(i) = xpoly(i) - 360.0 - else - xpoly(i) = xpoly(i) + 360.0 - end if - end if - end if - end do - pres@gsFillColor = doubletointeger(198*(h(iCell) - minfld)/fldrange+2) - gsn_polygon(wks,map,xpoly(0:nEdgesOnCell(iCell)-1),ypoly(0:nEdgesOnCell(iCell)-1),pres); - end do - - - ; - ; Draw label bar - ; - - xcb = new((/4/), "float") - ycb = new((/4/), "float") - - tres = True - tres@txAngleF = 90.0 - tres@txFontHeightF = 0.015 - do i=2,200 - xcb(0) = 0.125 + i*0.75/198 - ycb(0) = 0.11 - - xcb(1) = 0.125 + (i+1)*0.75/198 - ycb(1) = 0.11 - - xcb(2) = 0.125 + (i+1)*0.75/198 - ycb(2) = 0.16 - - xcb(3) = 0.125 + i*0.75/198 - ycb(3) = 0.16 - - tres@gsFillColor = i - - gsn_polygon_ndc(wks,xcb,ycb,tres); - - j = (i-2) % 20 - if ((j .eq. 0) .or. (i .eq. 200)) then - ff = minfld + int2flt(i-2) * fldrange / 198.0 - label = sprintf("%5.3g", ff) - gsn_text_ndc(wks, label, xcb(0), 0.060, tres) - end if - - end do - - mres = True - mres@mpCenterLatF = 0. - mres@mpCenterLonF = 0. 
- mres@mpGridAndLimbOn = False - mres@mpOutlineOn = True - mres@mpFillOn = False - mres@mpPerimOn = False - mres@gsnFrame = False - mapo = gsn_csm_map(wks,mres) - - frame(wks) - -end - diff --git a/visualization/ncl/atm_contours.ncl b/visualization/ncl/atm_contours.ncl deleted file mode 100644 index 856837f68..000000000 --- a/visualization/ncl/atm_contours.ncl +++ /dev/null @@ -1,152 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - - ; - ; Which field to plot - ; - plotfield = "h" -; plotfield = "ke" -; plotfield = "vorticity" - - ; - ; Whether to plot wind vectors - ; -; winds = True - winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; -; filled = True - filled = False - - ; - ; The (lat,lon) the plot is to be centered over - ; - cenLat = 0.0 - cenLon = 0.0 - - ; - ; Projection to use for plot - ; -; projection = "Orthographic" - projection = "CylindricalEquidistant" - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","atm_contours") - gsn_define_colormap(wks,"gui_default") - - fname = getenv("FNAME") - f = addfile(fname,"r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - if (plotfield .eq. "h" .or. plotfield .eq. "ke") then - res@sfXArray = lonCell - res@sfYArray = latCell - end if - if (plotfield .eq. "vorticity") then - res@sfXArray = lonVertex - res@sfYArray = latVertex - end if - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - -; res@cnLevelSpacingF = 50.0 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = projection - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = cenLat - res@mpCenterLonF = cenLon - res@mpGridAndLimbOn = True - res@mpGridAndLimbDrawOrder = "PreDraw" - res@mpGridLineColor = "Background" - res@mpOutlineOn = True - res@mpDataBaseVersion = "Ncarg4_1" - res@mpDataSetName = "Earth..3" - res@mpOutlineBoundarySets = "Geophysical" - res@mpFillOn = False - res@mpPerimOn = True - res@gsnFrame = False - res@cnLineThicknessF = 2.0 - res@cnLineColor = "NavyBlue" - - t = stringtointeger(getenv("T")) - if (plotfield .eq. "h") then -; fld = f->xice(t,:) -; fld = f->sst(t,:) -; fld = f->surface_pressure(t,:) -; fld = f->pressure_base(t,:,25) + f->pressure_p(t,:,25) - fld = f->theta(t,:,25) - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,0) - end if - if (plotfield .eq. 
"vorticity") then - fld = f->vorticity(t,:,0) - end if - res@cnLineDashPattern = 0 - map = gsn_csm_contour_map(wks,fld,res) - - if (winds) then - u = f->u(t,:,0) - v = f->v(t,:,0) - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - end if - - frame(wks) - -end - diff --git a/visualization/ncl/atm_mesh.ncl b/visualization/ncl/atm_mesh.ncl deleted file mode 100644 index e68aaba8d..000000000 --- a/visualization/ncl/atm_mesh.ncl +++ /dev/null @@ -1,80 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - - r2d = 57.2957795 ; radians to degrees - - wks = gsn_open_wks("pdf","atm_mesh") - - colors = (/"white","black","lightskyblue1","lightskyblue1","bisque"/) -; colors = (/"white","black","white","white","grey90"/) - gsn_define_colormap(wks,colors) - - fname = getenv("FNAME") - f = addfile(fname,"r") - - xVertex = f->xVertex(:) - yVertex = f->yVertex(:) - zVertex = f->zVertex(:) - verticesOnCell = f->verticesOnCell(:,:) - verticesOnEdge = f->verticesOnEdge(:,:) - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - - res = True - res@gsnMaximize = True - - res@mpProjection = "Orthographic" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 50. - res@mpCenterLonF = -100. - res@mpCenterRotF = -100. - res@mpGridAndLimbOn = False - res@mpOutlineOn = True - res@mpFillOn = True - res@mpPerimOn = False - res@gsnFrame = False - res@mpOceanFillColor = 3 - res@mpInlandWaterFillColor = 3 - res@mpLandFillColor = 4 - - map = gsn_csm_map(wks,res) - - lres = True - lres@gsLineThicknessF = 0.10 - - esizes = dimsizes(latEdge) - ecx = new((/esizes(0),2/),double) - ecy = new((/esizes(0),2/),double) - do j=0,esizes(0)-1 - ecy(j,0) = latVertex(verticesOnEdge(j,0)-1) - ecx(j,0) = lonVertex(verticesOnEdge(j,0)-1) - ecy(j,1) = latVertex(verticesOnEdge(j,1)-1) - ecx(j,1) = lonVertex(verticesOnEdge(j,1)-1) - end do - - do j=0,esizes(0)-1 - if (abs(ecx(j,0) - ecx(j,1)) .gt. 180.0) then - if (ecx(j,0) .gt. 
ecx(j,1)) then - ecx(j,0) = ecx(j,0) - 360.0 - else - ecx(j,1) = ecx(j,1) - 360.0 - end if - end if - end do - - do j=0,esizes(0)-1 - gsn_polyline(wks,map,ecx(j,:),ecy(j,:),lres) - end do - - frame(wks) - -end - diff --git a/visualization/ncl/atm_xsec.ncl b/visualization/ncl/atm_xsec.ncl deleted file mode 100644 index e90fcc65e..000000000 --- a/visualization/ncl/atm_xsec.ncl +++ /dev/null @@ -1,373 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - pi = 3.14159265 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - ; - ; Which field to plot - ; -; plotfield = "w" - plotfield = "theta" -; plotfield = "ke" -; plotfield = "vorticity" - - - ; - ; Whether to plot horizontal wind vectors - ; -; horiz_winds = True - horiz_winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; - filled = True -; filled = False - - ; - ; Starting and ending locations (in degrees) - ; Exercise caution when setting these: setting start_lon=90.0 and end_lon=-90.0 - ; would create a cross-section including the prime meridian, whereas setting - ; start_lon=90.0 and end_lon=270.0 would create a cross-section containing - ; the date line, for example. - ; - ; - start_lat = 40.0 - start_lon = -140.0 - end_lat = 40.0 - end_lon = -80.0 - - ; - ; The number of points along the cross section - ; - nsec = 250 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - wks = gsn_open_wks("pdf","atm_xsec") - gsn_define_colormap(wks,"BlAqGrYeOrReVi200") - - fname = getenv("FNAME") - f = addfile(fname,"r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - xCell = f->xCell(:) - yCell = f->yCell(:) - zCell = f->zCell(:) - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - xVertex = f->xVertex(:) - yVertex = f->yVertex(:) - zVertex = f->zVertex(:) - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - xEdge = f->xEdge(:) - yEdge = f->yEdge(:) - zEdge = f->zEdge(:) - zgrid = f->zgrid(:,:) / 1000.0 - verticesOnCell = f->verticesOnCell(:,:) - edgesOnCell = f->edgesOnCell(:,:) - nCellsOnCell = f->nEdgesOnCell(:) - cellsOnCell = f->cellsOnCell(:,:) - alpha = f->angleEdge(:) - - dims = dimsizes(latCell) - nCells = dims(0) - - start_lat = start_lat / r2d - start_lon = start_lon / r2d - end_lat = end_lat / r2d - end_lon = end_lon / r2d - - radius = 6371220.0 - xsec_latitude = start_lat - xsec_longitude = start_lon - xsec_lat_inc = (end_lat - start_lat) / (int2flt(nsec) - 1.0) - xsec_lon_inc = (end_lon - start_lon) / (int2flt(nsec) - 1.0) - - xsecx = new((/nsec/),float) - xsecy = new((/nsec/),float) - xsecz = new((/nsec/),float) - xsec_cell_id = new((/nsec/),integer) - xsec_edge_id = new((/nsec/),integer) - xsec_vtx_id = new((/nsec/),integer) - xsec_id = new((/nsec/),integer) - - ; Compute (x,y,z) coordinates for points on cross section - do i=0,nsec-1 - xsecx(i) = radius * cos(xsec_longitude) * cos(xsec_latitude) - xsecy(i) = radius * sin(xsec_longitude) * cos(xsec_latitude) - xsecz(i) = radius * sin(xsec_latitude) - xsec_latitude = xsec_latitude + xsec_lat_inc - xsec_longitude = xsec_longitude + xsec_lon_inc - end do - - ; Find cell containing first cross section point - dmin = 2.0 * radius - cellmin = -1 - do i=0,nCells-1 - d = sqrt((xCell(i) - xsecx(0))^2.0 + (yCell(i) - xsecy(0))^2.0 + (zCell(i) - 
xsecz(0))^2.0) - if (d .lt. dmin) then - cellmin = i - dmin = doubletofloat(d) - end if - end do - xsec_cell_id(0) = cellmin - - ; For the remaining cross section points, find the grid cell containing them - do j=1,nsec-1 - moved = 1 - do while (moved .ne. 0) - moved = 0 - d = sqrt((xCell(cellmin) - xsecx(j))^2.0 + (yCell(cellmin) - xsecy(j))^2.0 + (zCell(cellmin) - xsecz(j))^2.0) - do k=0,nCellsOnCell(cellmin)-1 - dn = sqrt((xCell(cellsOnCell(cellmin,k)-1) - xsecx(j))^2.0 + (yCell(cellsOnCell(cellmin,k)-1) - xsecy(j))^2.0 + (zCell(cellsOnCell(cellmin,k)-1) - xsecz(j))^2.0) - if (dn .lt. d) then - d = dn - nearest = (/cellsOnCell(cellmin,k)/)-1 - moved = 1 - end if - end do - if (moved .eq. 1) then - cellmin = nearest - end if - end do - xsec_cell_id(j) = cellmin - end do - - ; For all cross section points, find the nearest vertex and edge - do i=0,nsec-1 - iVtx = verticesOnCell(xsec_cell_id(i),0) - 1 - iEdge = edgesOnCell(xsec_cell_id(i),0) - 1 - xsec_edge_id(i) = iEdge - xsec_vtx_id(i) = iVtx - de = sqrt((xEdge(iEdge) - xsecx(i))^2.0 + (yEdge(iEdge) - xsecy(i))^2.0 + (zEdge(iEdge) - xsecz(i))^2.0) - dv = sqrt((xVertex(iVtx) - xsecx(i))^2.0 + (yVertex(iVtx) - xsecy(i))^2.0 + (zVertex(iVtx) - xsecz(i))^2.0) - do j=1,nCellsOnCell(xsec_cell_id(i))-1 - iVtx = verticesOnCell(xsec_cell_id(i),j) - 1 - iEdge = edgesOnCell(xsec_cell_id(i),j) - 1 - de_test = sqrt((xEdge(iEdge) - xsecx(i))^2.0 + (yEdge(iEdge) - xsecy(i))^2.0 + (zEdge(iEdge) - xsecz(i))^2.0) - dv_test = sqrt((xVertex(iVtx) - xsecx(i))^2.0 + (yVertex(iVtx) - xsecy(i))^2.0 + (zVertex(iVtx) - xsecz(i))^2.0) - if (de_test .lt. de) then - de = de_test - xsec_edge_id(i) = iEdge - end if - if (dv_test .lt. dv) then - dv = dv_test - xsec_vtx_id(i) = iVtx - end if - end do - end do - - ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - ; At this point, xsec_cell_id(:), xsec_edge_id(:), and xsec_vtx_id(:) contains the cell, edge, - ; and vertex IDs of the nearest points to those along the cross section - ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - res = True - res@gsnMaximize = False - res@gsnSpreadColors = True - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - - res@cnLevelSpacingF = 0.01 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@gsnFrame = False - - - ; - ; Select field to be plotted, and set generic array xsec_id(:) to contain IDs of - ; locations (cell, edge, or vertex) in that field containg cross section points - ; - - t = stringtointeger(getenv("T")) - if (plotfield .eq. "w") then - fld1 = f->w(t,:,:) - ldims = dimsizes(fld1) - fld = new((/ldims(0),ldims(1)-1/),"double") - ; Average w to center of layers - do i=0,ldims(0)-1 - do j=0,ldims(1)-2 - fld(i,j) = 0.5*(fld1(i,j)+fld1(i,j+1)) - end do - end do - nVertLevels = ldims(1) - nVertLevels = nVertLevels-1 - xsec_id(:) = xsec_cell_id(:) - end if - if (plotfield .eq. "theta") then - fld = f->theta(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - xsec_id(:) = xsec_cell_id(:) - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - xsec_id(:) = xsec_cell_id(:) - end if - if (plotfield .eq. 
"vorticity") then - fld = f->vorticity(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - xsec_id(:) = xsec_vtx_id(:) - end if - res@cnLineDashPattern = 0 - - height1 = new((/nVertLevels+1,nsec/),float) - height = new((/nVertLevels+1,nsec+1/),float) - x = new((/nVertLevels+1,nsec+1/),float) - - ; Extract field from along cross section into plotting array - arr = new((/nVertLevels,nsec/),float) - do i=0,nsec-1 - do j=0,nVertLevels-1 -; arr(j,i) = 0.5*doubletofloat(fld(xsec_id(i),j)+fld(xsec_id(i),j+1)) - arr(j,i) = doubletofloat(fld(xsec_id(i),j)) - height1(j,i) = doubletofloat(zgrid(xsec_id(i),j)) - end do - j = nVertLevels - height1(j,i) = doubletofloat(zgrid(xsec_id(i),j)) - end do - - do j=0,nVertLevels - x(j,nsec) = int2flt(nsec) + 0.5 - x(j,0) = 0.5 - height(j,0) = height1(j,0) - height(j,nsec) = height1(j,nsec-1) - end do - - do i=1,nsec-1 - do j=0,nVertLevels - height(j,i) = 0.5*(height1(j,i) + height1(j,i-1)) - x(j,i) = int2flt(i) + 0.5 - end do - end do - - xpoly = new((/5/), "float") - ypoly = new((/5/), "float") - - minfld = min(arr) - maxfld = max(arr) - fldrange = maxfld - minfld - - res@trYMinF = min(zgrid) - res@trYMaxF = max(zgrid) - res@trXMinF = int2flt(0) - res@trXMaxF = int2flt(nsec+1) - - res@tiYAxisString = "z(km)" - res@tiYAxisFontHeightF = 0.017 - res@tiXAxisString = "cell" - res@tiXAxisFontHeightF = 0.017 - - map = gsn_csm_xy(wks,x,height,res) - - do i=0,nsec-1 - do j=0,nVertLevels-1 - xpoly(0) = x(j,i) - xpoly(1) = x(j,i+1) - xpoly(2) = x(j+1,i+1) - xpoly(3) = x(j+1,i) - xpoly(4) = x(j,i) - - ypoly(0) = height(j,i) - ypoly(1) = height(j,i+1) - ypoly(2) = height(j+1,i+1) - ypoly(3) = height(j+1,i) - ypoly(4) = height(j,i) - - res@gsFillColor = doubletointeger(195*(arr(j,i) - minfld)/fldrange+2) - gsn_polygon(wks,map,xpoly,ypoly,res); - end do - end do - - if (horiz_winds) then - u = f->u(t,:,:) - v = f->v(t,:,:) - esizes = dimsizes(u) - nVertLevels = esizes(1) - u_earth = new((/nVertLevels,nsec/),float) - v_earth = new((/nVertLevels,nsec/),float) - x_edge = new((/nVertLevels,nsec/),float) - y_edge = new((/nVertLevels,nsec/),float) - do i=0,nsec-1 - do j=0,nVertLevels-1 - u_earth(j,i) = doubletofloat(u(xsec_edge_id(i),j)*cos(alpha(xsec_edge_id(i))) - v(xsec_edge_id(i),j)*sin(alpha(xsec_edge_id(i)))) - v_earth(j,i) = doubletofloat(u(xsec_edge_id(i),j)*sin(alpha(xsec_edge_id(i))) + v(xsec_edge_id(i),j)*cos(alpha(xsec_edge_id(i)))) - x_edge(j,i) = i - y_edge(j,i) = j - end do - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",50.0) - wmsetp("VCW",0.10) - - wmvect(wks, x_edge, y_edge, u_earth, v_earth) - end if - - ; - ; Draw label bar - ; - - xcb = new((/4/), "float") - ycb = new((/4/), "float") - - tres = True - tres@txAngleF = 90.0 - tres@txFontHeightF = 0.013 - do i=2,200 - xcb(0) = 0.125 + i*0.75/198 - ycb(0) = 0.08 - - xcb(1) = 0.125 + (i+1)*0.75/198 - ycb(1) = 0.08 - - xcb(2) = 0.125 + (i+1)*0.75/198 - ycb(2) = 0.10 - - xcb(3) = 0.125 + i*0.75/198 - ycb(3) = 0.10 - - tres@gsFillColor = i - - gsn_polygon_ndc(wks,xcb,ycb,tres); - - j = (i-2) % 20 - if ((j .eq. 0) .or. (i .eq. 
200)) then - ff = minfld + int2flt(i-2) * fldrange / 198.0 - label = sprintf("%8.3g", ff) - gsn_text_ndc(wks, label, xcb(0), 0.050, tres) - end if - - end do - - frame(wks) - -end - diff --git a/visualization/ncl/cells.ncl b/visualization/ncl/cells.ncl deleted file mode 100644 index 94f55be35..000000000 --- a/visualization/ncl/cells.ncl +++ /dev/null @@ -1,144 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - - ; - ; Which field to plot - ; - plotfield = "h" -; plotfield = "ke" -; plotfield = "vorticity" - - ; - ; Whether to plot wind vectors - ; -; winds = True - winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; -; filled = True - filled = False - - ; - ; The (lat,lon) the plot is to be centered over - ; - cenLat = 0.0 - cenLon = 0.0 - - ; - ; Projection to use for plot - ; -; projection = "Orthographic" - projection = "CylindricalEquidistant" - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - if (plotfield .eq. "h" .or. plotfield .eq. "ke") then - res@sfXArray = lonCell - res@sfYArray = latCell - end if - if (plotfield .eq. "vorticity") then - res@sfXArray = lonVertex - res@sfYArray = latVertex - end if - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - - res@cnLevelSpacingF = 50.0 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = projection - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = cenLat - res@mpCenterLonF = cenLon - res@mpGridAndLimbOn = True - res@mpGridAndLimbDrawOrder = "PreDraw" - res@mpGridLineColor = "Background" - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - if (plotfield .eq. "h") then - h = f->h(t,:,0) - hs = f->h_s(:) - fld = h + hs - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,0) - end if - if (plotfield .eq. 
"vorticity") then - fld = f->vorticity(t,:,0) - end if - res@cnLineDashPattern = 0 - map = gsn_csm_contour_map(wks,fld,res) - - if (winds) then - u = f->u(t,:,0) - v = f->v(t,:,0) - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - end if - - frame(wks) - -end - diff --git a/visualization/ncl/cells_hyd0.ncl b/visualization/ncl/cells_hyd0.ncl deleted file mode 100644 index 090711cbf..000000000 --- a/visualization/ncl/cells_hyd0.ncl +++ /dev/null @@ -1,159 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - - ; - ; Which field to plot - ; - plotfield = "h" -; plotfield = "ke" -; plotfield = "vorticity" - - ; - ; Whether to plot wind vectors - ; -; winds = True - winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; - filled = True -; filled = False - - ; - ; The (lat,lon) the plot is to be centered over - ; - cenLat = 0.0 - cenLon = 180.0 - - ; - ; Projection to use for plot - ; -; projection = "Orthographic" - projection = "CylindricalEquidistant" - - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - -; wks_type = "pdf" -; wks_type@wkOrientation = "landscape" -; wks = gsn_open_wks(wks_type,"cells") - - wks = gsn_open_wks("pdf","cells") -; wks = gsn_open_wks("x11","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - if (plotfield .eq. "h" .or. plotfield .eq. "ke") then - res@sfXArray = lonCell - res@sfYArray = latCell - end if - if (plotfield .eq. "vorticity") then - res@sfXArray = lonVertex - res@sfYArray = latVertex - end if - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True -; res@cnLinesOn = False -; res@cnRasterModeOn = True - res@cnLinesOn = True - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - -; res@cnLevelSpacingF = 10.0 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = projection - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = cenLat - res@mpCenterLonF = cenLon - res@mpGridAndLimbOn = True - res@mpGridAndLimbDrawOrder = "PreDraw" - res@mpGridLineColor = "Background" - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - if (plotfield .eq. "h") then -; h = f->h(t,:,0) -; hs = f->h_s(:) -; fld = h + hs -; h = f->ww(t,:,5) -; h = (f->surface_pressure(t,:) + 219.4)/100. 
-; h = f->geopotential(t,:,18) -; h = f->theta(t,:,25)-f->theta(0,:,25) -; h = f->theta(t,:,18) - h = f->surface_pressure(t,:)/100. -; h = (f->surface_pressure(t,:)-f->surface_pressure(0,:))/100. - fld = h - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,18) - end if - if (plotfield .eq. "vorticity") then - fld = f->vorticity(t,:,0) - end if - res@cnLineDashPattern = 0 - map = gsn_csm_contour_map(wks,fld,res) - - if (winds) then - u = f->u(t,:,25) - f->u(0,:,25) - v = f->v(t,:,25) - f->v(0,:,25) - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - end if - - frame(wks) - -end - diff --git a/visualization/ncl/contours.ncl b/visualization/ncl/contours.ncl deleted file mode 100644 index 3f6625509..000000000 --- a/visualization/ncl/contours.ncl +++ /dev/null @@ -1,100 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - cellArea = f->areaCell(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True -; res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnLinesOn = True - res@cnLineLabelsOn = False - res@cnLevelSelectionMode = "AutomaticLevels" - res@cnLevelSpacingF = 100. - res@cnMonoLineDashPattern = True - res@cnLineDashPattern = 2 - res@cnInfoLabelOn = False - - res@lbBoxLinesOn = False - - res@mpProjection = "Orthographic" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 270. 
- res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpPerimOn = True - res@mpAreaMaskingOn = True - res@mpMaskAreaSpecifiers = "Land" - res@gsnFrame = False - - h0 = f->h(0,:,0) - print("Min/max of initial height field: "+min(h0)+", "+max(h0)) - map0 = gsn_csm_contour_map(wks,h0,res) - - t = stringtointeger(getenv("T")) - res@cnLineDashPattern = 0 - h = f->h(t,:,0) - print("Min/max of height field after one rotation: "+min(h)+", "+max(h)) - map1 = gsn_csm_contour_map(wks,h,res) - tres = False - - frame(wks) - - hdiff = h - h0 - print("Min/max error after one rotation: "+min(hdiff)+", "+max(hdiff)) - map1 = gsn_csm_contour_map(wks,hdiff,res) - - frame(wks) - - cellsizes = dimsizes(h) - intgrl = 0.0D - do i=0,cellsizes(0)-1 - intgrl = intgrl + h0(i)*cellArea(i) - end do - print("Original integrated volume: "+intgrl) - intgrl = 0.0D - do i=0,cellsizes(0)-1 - intgrl = intgrl + h(i)*cellArea(i) - end do - print("Integrated volume after one rotation: "+intgrl) - -end - diff --git a/visualization/ncl/fill.ncl b/visualization/ncl/fill.ncl deleted file mode 100644 index f7f07daa6..000000000 --- a/visualization/ncl/fill.ncl +++ /dev/null @@ -1,121 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 8 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnFillMode = "RasterFill" - - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - -; res@cnFillOn = False -; res@cnLinesOn = True -; res@cnLineLabelsOn = True - -; res@cnMaxLevelCount = 22 -; res@cnLevelSelectionMode = "ManualLevels" -; res@cnMinLevelValF = -100. -; res@cnMaxLevelValF = 1000. - res@cnLevelSpacingF = 50. - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - -; res@mpProjection = "Satellite" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 0. 
- res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - u = f->u(t,:,0) - v = f->v(t,:,0) - h = f->h(t,:,0) - hs = f->h_s(:) -; hsizes = dimsizes(h) -; h_total = new((/hsizes(1)/),double) -; h_total = 0.0 -; do i=1,hsizes(1) -; h_total(:) = h_total(:) + h(:) -; end do -; h_total(:) = h_total(:) + hs(:) - h(:) = h(:) + hs(:) - res@cnLineDashPattern = 0 - map = gsn_csm_contour_map(wks,h,res) - -; res@cnLineDashPattern = 11 -; map = gsn_csm_contour_map(wks,hs,res) - frame(wks) - -; esizes = dimsizes(u) -; u_earth = new(dimsizes(u),float) -; v_earth = new(dimsizes(u),float) -; lat_edge = new(dimsizes(u),float) -; lon_edge = new(dimsizes(u),float) -; do i=0,esizes(0)-1 -; j = i % 20 -; if (j .eq. 0) then -; u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) -; v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) -; else -; u_earth(i) = -999. -; v_earth(i) = -999. -; end if -; lat_edge(i) = doubletofloat(latEdge(i)) -; lon_edge(i) = doubletofloat(lonEdge(i)) -; end do - -; wmsetp("VCH",0.003) -; wmsetp("VRN",0.010) -; wmsetp("VRS",20.0) -; wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - -; frame(wks) -end - diff --git a/visualization/ncl/tc2_cells.ncl b/visualization/ncl/tc2_cells.ncl deleted file mode 100644 index 74b7118fc..000000000 --- a/visualization/ncl/tc2_cells.ncl +++ /dev/null @@ -1,109 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("x11","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnFillOn = True - res@cnFillMode = "RasterFill" - res@cnLinesOn = False - res@cnLineLabelsOn = False -; res@cnMaxLevelCount = 22 - res@cnLevelSelectionMode = "ManualLevels" - res@cnMinLevelValF = 1000. - res@cnMaxLevelValF = 3000. - res@cnLevelSpacingF = 100. - res@cnInfoLabelOn = False - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@mpProjection = "Stereographic" - res@mpLimitMode = "LatLon" - res@mpMinLonF = 0.0 - res@mpMaxLonF = 360.0 - res@mpMinLatF = 0.0 - res@mpMaxLatF = 90.0 - - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 90. - res@mpCenterLonF = 270. 
- res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - u = f->u(t,:,0) - v = f->v(t,:,0) - h = f->h(t,:,0) - map = gsn_csm_contour_map(wks,h,res) - - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - j = i % 20 - if (j .eq. 0) then -; u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) -; v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - u_earth(i) = doubletofloat(u(i)) - v_earth(i) = doubletofloat(v(i)) - else - u_earth(i) = -999. - v_earth(i) = -999. - end if - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.003) - wmsetp("VRN",0.010) - wmsetp("VRS",20.0) - wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - - frame(wks) -end - diff --git a/visualization/ncl/tc2_contours.ncl b/visualization/ncl/tc2_contours.ncl deleted file mode 100644 index 6b4eef125..000000000 --- a/visualization/ncl/tc2_contours.ncl +++ /dev/null @@ -1,100 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 8 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - cellArea = f->areaCell(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True -; res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnLinesOn = True - res@cnLineLabelsOn = False - res@cnLevelSelectionMode = "AutomaticLevels" - res@cnLevelSpacingF = 100. - res@cnMonoLineDashPattern = True - res@cnLineDashPattern = 2 - res@cnInfoLabelOn = False - - res@lbBoxLinesOn = False - - res@mpProjection = "Stereographic" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 90. - res@mpCenterLonF = 270. 
- res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpPerimOn = True - res@mpAreaMaskingOn = True - res@mpMaskAreaSpecifiers = "Land" - res@gsnFrame = False - - h0 = f->h(0,:,0) - print("Min/max of initial height field: "+min(h0)+", "+max(h0)) - map0 = gsn_csm_contour_map(wks,h0,res) - - t = stringtointeger(getenv("T")) - res@cnLineDashPattern = 0 - h = f->h(t,:,0) - print("Min/max of height field after one rotation: "+min(h)+", "+max(h)) - map1 = gsn_csm_contour_map(wks,h,res) - tres = False - - frame(wks) - - hdiff = h - h0 - print("Min/max error after one rotation: "+min(hdiff)+", "+max(hdiff)) - map1 = gsn_csm_contour_map(wks,hdiff,res) - - frame(wks) - - cellsizes = dimsizes(h) - intgrl = 0.0D - do i=0,cellsizes(0)-1 - intgrl = intgrl + h0(i)*cellArea(i) - end do - print("Original integrated volume: "+intgrl) - intgrl = 0.0D - do i=0,cellsizes(0)-1 - intgrl = intgrl + h(i)*cellArea(i) - end do - print("Integrated volume after one rotation: "+intgrl) - -end - diff --git a/visualization/ncl/tracer1.ncl b/visualization/ncl/tracer1.ncl deleted file mode 100644 index 63b018ed3..000000000 --- a/visualization/ncl/tracer1.ncl +++ /dev/null @@ -1,101 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnFillOn = True - res@cnFillMode = "RasterFill" - res@cnLinesOn = False - res@cnLineLabelsOn = False -; res@cnMaxLevelCount = 22 -; res@cnLevelSelectionMode = "ManualLevels" -; res@cnMinLevelValF = -100. -; res@cnMaxLevelValF = 1000. - res@cnLevelSpacingF = 50. - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - -; res@mpProjection = "Satellite" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 0. - res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - u = f->u(t,:,0) - v = f->v(t,:,0) - h = f->tracers(t,:,0,0) - map = gsn_csm_contour_map(wks,h,res) - - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - j = i % 20 - if (j .eq. 0) then - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - else - u_earth(i) = -999. - v_earth(i) = -999. 
- end if - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.003) - wmsetp("VRN",0.010) - wmsetp("VRS",20.0) -; wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - - frame(wks) -end - diff --git a/visualization/ncl/tracer2.ncl b/visualization/ncl/tracer2.ncl deleted file mode 100644 index 4a98ccc3f..000000000 --- a/visualization/ncl/tracer2.ncl +++ /dev/null @@ -1,101 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - - maxedges = 7 - - wks = gsn_open_wks("pdf","cells") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - x = f->lonCell(:) * r2d - y = f->latCell(:) * r2d - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - verticesOnCell = f->verticesOnCell(:,:) - alpha = f->angleEdge(:) - - sizes = dimsizes(verticesOnCell) - - cx = new((/sizes(0),maxedges/),double) - cy = new((/sizes(0),maxedges/),double) - - do j=0,sizes(0)-1 - do i=0,maxedges-2 - cy(j,i) = latVertex(verticesOnCell(j,i)-1) - cx(j,i) = lonVertex(verticesOnCell(j,i)-1) - end do - cx(j,maxedges-1) = cx(j,0) - cy(j,maxedges-1) = cy(j,0) - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@sfXArray = x - res@sfYArray = y - res@sfXCellBounds = cx - res@sfYCellBounds = cy - - res@cnFillOn = True - res@cnFillMode = "RasterFill" - res@cnLinesOn = False - res@cnLineLabelsOn = False -; res@cnMaxLevelCount = 22 -; res@cnLevelSelectionMode = "ManualLevels" -; res@cnMinLevelValF = -100. -; res@cnMaxLevelValF = 1000. - res@cnLevelSpacingF = 50. - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - -; res@mpProjection = "Satellite" - res@mpDataBaseVersion = "MediumRes" - res@mpCenterLatF = 0. - res@mpCenterLonF = 0. - res@mpGridAndLimbOn = False - res@mpOutlineOn = False - res@mpFillOn = False - res@mpPerimOn = False - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - u = f->u(t,:,0) - v = f->v(t,:,0) - h = f->tracers(t,:,0,1) - map = gsn_csm_contour_map(wks,h,res) - - esizes = dimsizes(u) - u_earth = new(dimsizes(u),float) - v_earth = new(dimsizes(u),float) - lat_edge = new(dimsizes(u),float) - lon_edge = new(dimsizes(u),float) - do i=0,esizes(0)-1 - j = i % 20 - if (j .eq. 0) then - u_earth(i) = doubletofloat(u(i)*cos(alpha(i)) - v(i)*sin(alpha(i))) - v_earth(i) = doubletofloat(u(i)*sin(alpha(i)) + v(i)*cos(alpha(i))) - else - u_earth(i) = -999. - v_earth(i) = -999. 
- end if - lat_edge(i) = doubletofloat(latEdge(i)) - lon_edge(i) = doubletofloat(lonEdge(i)) - end do - - wmsetp("VCH",0.003) - wmsetp("VRN",0.010) - wmsetp("VRS",20.0) -; wmvectmap(wks, lat_edge, lon_edge, u_earth, v_earth) - - frame(wks) -end - diff --git a/visualization/ncl/xsec.ncl b/visualization/ncl/xsec.ncl deleted file mode 100644 index 73e2ed7cb..000000000 --- a/visualization/ncl/xsec.ncl +++ /dev/null @@ -1,235 +0,0 @@ -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" -load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" - -begin - r2d = 57.2957795 ; radians to degrees - pi = 3.14159265 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - ; - ; Which field to plot - ; - plotfield = "h" -; plotfield = "ke" -; plotfield = "vorticity" - - - ; - ; Whether to plot horizontal wind vectors - ; -; horiz_winds = True - horiz_winds = False - - ; - ; Whether to do color-filled plot (filled=True) or - ; to plot contours of height field (filled=False) - ; - filled = True -; filled = False - - ; - ; The longitude of the pole-to-pole cross section - ; - xsec_longitude = -1.0 * pi / 6.0 - - ; - ; The number of points along the cross section - ; - nsec = 200 - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - - - wks = gsn_open_wks("pdf","xsec") - gsn_define_colormap(wks,"gui_default") - - f = addfile("output.nc","r") - - lonCell = f->lonCell(:) * r2d - latCell = f->latCell(:) * r2d - xCell = f->xCell(:) - yCell = f->yCell(:) - zCell = f->zCell(:) - lonVertex = f->lonVertex(:) * r2d - latVertex = f->latVertex(:) * r2d - xVertex = f->xVertex(:) - yVertex = f->yVertex(:) - zVertex = f->zVertex(:) - lonEdge = f->lonEdge(:) * r2d - latEdge = f->latEdge(:) * r2d - xEdge = f->xEdge(:) - yEdge = f->yEdge(:) - zEdge = f->zEdge(:) - verticesOnCell = f->verticesOnCell(:,:) - edgesOnCell = f->edgesOnCell(:,:) - nCellsOnCell = f->nEdgesOnCell(:) - cellsOnCell = f->cellsOnCell(:,:) - alpha = f->angleEdge(:) - - dims = dimsizes(latCell) - nCells = dims(0) - - radius = 6371220.0 - xsec_latitude = 3.141592653 / 2.0 - xsec_lat_inc = 3.141592653 / (int2flt(nsec) - 1.0) - - xsecx = new((/nsec/),float) - xsecy = new((/nsec/),float) - xsecz = new((/nsec/),float) - xsec_id = new((/nsec/),integer) - xsec_edge_id = new((/nsec/),integer) - xsec_vtx_id = new((/nsec/),integer) - - ; Compute (x,y,z) coordinates for points on cross section - do i=0,nsec-1 - xsecx(i) = radius * cos(xsec_longitude) * cos(xsec_latitude) - xsecy(i) = radius * sin(xsec_longitude) * cos(xsec_latitude) - xsecz(i) = radius * sin(xsec_latitude) - xsec_latitude = xsec_latitude - xsec_lat_inc - end do - - ; Find cell containing first cross section point - dmin = 2.0 * radius - cellmin = -1 - do i=0,nCells-1 - d = sqrt((xCell(i) - xsecx(0))^2.0 + (yCell(i) - xsecy(0))^2.0 + (zCell(i) - xsecz(0))^2.0) - if (d .lt. dmin) then - cellmin = i - dmin = doubletofloat(d) - end if - end do - xsec_id(0) = cellmin - - ; For all other cross section points, find the grid cell containing them - do j=1,nsec-1 - moved = 1 - do while (moved .ne. 0) - moved = 0 - d = sqrt((xCell(cellmin) - xsecx(j))^2.0 + (yCell(cellmin) - xsecy(j))^2.0 + (zCell(cellmin) - xsecz(j))^2.0) - do k=0,nCellsOnCell(cellmin)-1 - dn = sqrt((xCell(cellsOnCell(cellmin,k)-1) - xsecx(j))^2.0 + (yCell(cellsOnCell(cellmin,k)-1) - xsecy(j))^2.0 + (zCell(cellsOnCell(cellmin,k)-1) - xsecz(j))^2.0) - if (dn .lt. 
d) then - d = dn - nearest = (/cellsOnCell(cellmin,k)/)-1 - moved = 1 - end if - end do - if (moved .eq. 1) then - cellmin = nearest - end if - end do - xsec_id(j) = cellmin - end do - - ; For all cross section points, find the nearest vertex and edge - do i=0,nsec-1 - iVtx = verticesOnCell(xsec_id(i),0) - 1 - iEdge = edgesOnCell(xsec_id(i),0) - 1 - xsec_edge_id(i) = iEdge - xsec_vtx_id(i) = iVtx - de = sqrt((xEdge(iEdge) - xsecx(i))^2.0 + (yEdge(iEdge) - xsecy(i))^2.0 + (zEdge(iEdge) - xsecz(i))^2.0) - dv = sqrt((xVertex(iVtx) - xsecx(i))^2.0 + (yVertex(iVtx) - xsecy(i))^2.0 + (zVertex(iVtx) - xsecz(i))^2.0) - do j=1,nCellsOnCell(xsec_id(i))-1 - iVtx = verticesOnCell(xsec_id(i),j) - 1 - iEdge = edgesOnCell(xsec_id(i),j) - 1 - de_test = sqrt((xEdge(iEdge) - xsecx(i))^2.0 + (yEdge(iEdge) - xsecy(i))^2.0 + (zEdge(iEdge) - xsecz(i))^2.0) - dv_test = sqrt((xVertex(iVtx) - xsecx(i))^2.0 + (yVertex(iVtx) - xsecy(i))^2.0 + (zVertex(iVtx) - xsecz(i))^2.0) - if (de_test .lt. de) then - de = de_test - xsec_edge_id(i) = iEdge - end if - if (dv_test .lt. dv) then - dv = dv_test - xsec_vtx_id(i) = iVtx - end if - end do - end do - - res = True - res@gsnMaximize = True - res@gsnSpreadColors = True - - res@cnFillMode = "AreaFill" - - if (filled) then - res@cnFillOn = True - res@cnLinesOn = False - res@cnLineLabelsOn = False - else - res@cnFillOn = False - res@cnLinesOn = True - res@cnLineLabelsOn = True - end if - - res@cnLevelSpacingF = 50.0 - res@cnInfoLabelOn = True - - res@lbLabelAutoStride = True - res@lbBoxLinesOn = False - - res@gsnFrame = False - - t = stringtointeger(getenv("T")) - if (plotfield .eq. "h") then - fld = f->h(t,:,:) - hs = f->h_s(:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - do i=0,nVertLevels-1 - fld(:,i) = fld(:,i) + hs(:) - end do - end if - if (plotfield .eq. "ke") then - fld = f->ke(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - end if - if (plotfield .eq. 
"vorticity") then - fld = f->vorticity(t,:,:) - ldims = dimsizes(fld) - nVertLevels = ldims(1) - xsec_id(:) = xsec_vtx_id(:) - end if - res@cnLineDashPattern = 0 - - ; Extract field from along cross section into plotting array - arr = new((/nVertLevels,nsec/),float) - do i=0,nsec-1 - do j=0,nVertLevels-1 - arr(j,i) = doubletofloat(fld(xsec_id(i),j)) - end do - end do - - map = gsn_csm_contour(wks,arr,res) - - if (horiz_winds) then - u = f->u(t,:,:) - v = f->v(t,:,:) - esizes = dimsizes(u) - u_earth = new((/nVertLevels,nsec/),float) - v_earth = new((/nVertLevels,nsec/),float) - x_edge = new((/nVertLevels,nsec/),float) - y_edge = new((/nVertLevels,nsec/),float) - do i=0,nsec-1 - do j=0,nVertLevels-1 - u_earth(j,i) = doubletofloat(u(xsec_edge_id(i),j)*cos(alpha(xsec_edge_id(i))) - v(xsec_edge_id(i),j)*sin(alpha(xsec_edge_id(i)))) - v_earth(j,i) = doubletofloat(u(xsec_edge_id(i),j)*sin(alpha(xsec_edge_id(i))) + v(xsec_edge_id(i),j)*cos(alpha(xsec_edge_id(i)))) - x_edge(j,i) = i - y_edge(j,i) = j - end do - end do - - wmsetp("VCH",0.0010) - wmsetp("VRN",0.010) - wmsetp("VRS",100.0) - wmsetp("VCW",0.10) - - wmvect(wks, x_edge, y_edge, u_earth, v_earth) - end if - - frame(wks) - -end - diff --git a/python_scripts/paraview_vtk_field_extractor/add_earth_sphere.py b/visualization/paraview_vtk_field_extractor/add_earth_sphere.py similarity index 100% rename from python_scripts/paraview_vtk_field_extractor/add_earth_sphere.py rename to visualization/paraview_vtk_field_extractor/add_earth_sphere.py diff --git a/python_scripts/paraview_vtk_field_extractor/annotate_date.py b/visualization/paraview_vtk_field_extractor/annotate_date.py similarity index 100% rename from python_scripts/paraview_vtk_field_extractor/annotate_date.py rename to visualization/paraview_vtk_field_extractor/annotate_date.py diff --git a/visualization/paraview_vtk_field_extractor/mpas_tools b/visualization/paraview_vtk_field_extractor/mpas_tools new file mode 120000 index 000000000..627733f3b --- /dev/null +++ b/visualization/paraview_vtk_field_extractor/mpas_tools @@ -0,0 +1 @@ +../../conda_package/mpas_tools/ \ No newline at end of file diff --git a/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py similarity index 57% rename from python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py rename to visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py index e1e0a7232..c963f025e 100755 --- a/python_scripts/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py +++ b/visualization/paraview_vtk_field_extractor/paraview_vtk_field_extractor.py @@ -9,7 +9,7 @@ It can extract a field across multiple files by passing in a regular expression for the filename patter. As an example, one can run the script using: -`./paraview_vtk_field_extractor.py -v areaCell,latVertex -f "hist.comp.*.nc"` + ./paraview_vtk_field_extractor.py -v areaCell,latVertex -f "hist.comp.*.nc" To extract a time series of areaCell,latVertex that spans multiple files. By default, time-independent fields on cells are written to a file @@ -31,7 +31,7 @@ a single index, or a comma-separated list of indices or a range of indices indices (separated by 1 or 2 colons). 
For example, -`-d maxEdges= nVertLeves=0:10:2 nParticles=0,2,4,6,8` + -d maxEdges= nVertLeves=0:10:2 nParticles=0,2,4,6,8 will ignore any fields with dimension maxEdges, extract every other layer from the first 10 vertical levels (each into its own field) and extract the five @@ -40,24 +40,55 @@ An index array can also be specified in this way (and these can be mixed with integer indices in a comma-separated list but not in a colon-separated range): -`-d nVertLeves=0,maxLevelCell` + -d nVertLeves=0,maxLevelCell will extract fields from the first vertical level and the vertical level with index given by maxLevelCell. +The extractor includes optional support for extracting geometry appropriate +for displaying variables at the depth of a topographic feature (typically the +top or bottom of the domain) for MPAS components with a spatially variable +top or bottom index (e.g. `maxLevelCell` in MPAS-Ocean). This is accomplished +with flags such as: + + --topo_dim=nVertLevels --topo_cell_index=maxLevelCell + +Fields on cells are sampled at the topographic index and the geometry includes +polygons corresponding to edges so that vertical faces between adjacent cells +can be displayed. Fields are extracted as normal except that they are sampled +as point data rather than cell data, allowing computations in ParaView to +display the topography. A mask field is also included indicating which parts +of edge polygons correspond to the boundary of the domain (boundaryMask == 1) +and which parts of cell and edge polygons are interior (boundaryMask == 0). +Together, this can be used to plot topography by using a calculator filter like +the following: + + coords*(1.0 + 100.0/mag(coords)*((1 - boundaryMask)*(-bottomDepth) + + 10.0*boundaryMask)) + +If this is entered into a Calculator Filter in ParaView with the "coordinate +result" box checked, the result will display the MPAS-Ocean topography, +exaggerated by a factor of 100, with a value equivalent to 10 m along boundary +points of edge polygons (a "water-tight" surface). 
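A complete invocation exercising the topographic extraction described above might look like the following (the field name temperature is only a placeholder; any cell field and time-series file pattern could be substituted, and the other flags are those documented above):

    ./paraview_vtk_field_extractor.py -v temperature -f "hist.comp.*.nc" \
        --topo_dim=nVertLevels --topo_cell_index=maxLevelCell

Because the extracted fields are written as point data in this mode, the calculator expression shown above can then be applied directly in ParaView to display the topography.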
+ Requirements: This script requires access to the following non standard modules: -pyevtk (available from opengeostat channel) -netCDF4 +evtk (available from e3sm channel) +netcdf4 numpy +for python 2.7: +future + Optional modules: -progressbar +progressbar2 """ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + import os import numpy as np -from netCDF4 import Dataset as NetCDFFile from netCDF4 import date2num from datetime import datetime @@ -69,14 +100,15 @@ except ImportError: use_progress_bar = False -import utils +from mpas_tools import viz def build_field_time_series(local_time_indices, file_names, mesh_file, out_dir, blocking, all_dim_vals, blockDimName, variable_list, vertices, connectivity, offsets, valid_mask, output_32bit, combine_output, append, - xtimeName): # {{{ + xtimeName, topo_dim=None, topo_cell_indices=None, + cell_to_point_map=None, boundary_mask=None): # {{{ if len(variable_list) == 0: return @@ -87,7 +119,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, outType = 'float64' # Get dimension info to allocate the size of Colors - time_series_file = NetCDFFile(file_names[0], 'r') + time_series_file = viz.open_netcdf(file_names[0]) if mesh_file is not None: # blockDim may not exist in time series file @@ -95,8 +127,17 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, else: blockDim = len(time_series_file.dimensions[blockDimName]) + if boundary_mask is not None: + variable_list.append('boundaryMask') + all_dim_vals['boundaryMask'] = None + pointData = True + cellData = False + else: + pointData = False + cellData = True + # Pre-compute the number of blocks - nBlocks = 1 + blockDim / blocking + nBlocks = int(np.ceil(blockDim / blocking)) nPolygons = len(offsets) nPoints = len(vertices[0]) @@ -107,7 +148,9 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, nHyperSlabs = 0 for iVar in range(nVars): var_name = variable_list[iVar] - if xtimeName is not None: + if boundary_mask is not None and var_name == 'boundaryMask': + var_has_time_dim[iVar] = False + elif xtimeName is not None: if var_name in time_series_file.variables: var_has_time_dim[iVar] = \ 'Time' in time_series_file.variables[var_name].dimensions @@ -122,10 +165,18 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, else: nHyperSlabs += len(extra_dim_vals) - time_series_file.close() - any_var_has_time_dim = np.any(var_has_time_dim) + if topo_dim is not None: + if (mesh_file is not None) and (topo_dim in mesh_file.dimensions): + nTopoLevels = len(mesh_file.dimensions[topo_dim]) + else: + nTopoLevels = len(time_series_file.dimensions[topo_dim]) + else: + nTopoLevels = None + + time_series_file.close() + try: os.makedirs(out_dir) except OSError: @@ -148,7 +199,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, field_bar = ProgressBar(widgets=widgets, maxval=nTimes*nHyperSlabs).start() else: - print "Writing time series...." 
+ print("Writing time series....") suffix = blockDimName[1:] if any_var_has_time_dim: @@ -157,27 +208,27 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, else: out_prefix = "timeDependentFieldsOn{}".format(suffix) # start the pvd file - pvd_file = utils.write_pvd_header(out_dir, out_prefix) + pvd_file = viz.write_pvd_header(out_dir, out_prefix) pvd_file.write('\n') if not combine_output and not np.all(var_has_time_dim): - out_prefix = "staticFieldsOn{}".format(suffix) + static_prefix = "staticFieldsOn{}".format(suffix) varIndices = np.arange(nVars)[np.logical_not(var_has_time_dim)] - timeIndependentFile = utils.write_vtp_header(out_dir, - out_prefix, - varIndices[0], - varIndices, - variable_list, - all_dim_vals, - vertices, - connectivity, - offsets, - nPoints, - nPolygons, - outType, - cellData=True, - pointData=False, - xtime=None) + timeIndependentFile = viz.write_vtp_header(out_dir, + static_prefix, + varIndices[0], + varIndices, + variable_list, + all_dim_vals, + vertices, + connectivity, + offsets, + nPoints, + nPolygons, + outType, + cellData=cellData, + pointData=pointData, + xtime=None) prev_file = "" for time_index in range(nTimes): @@ -185,7 +236,7 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if prev_file != file_names[time_index]: if prev_file != "": time_series_file.close() - time_series_file = NetCDFFile(file_names[time_index], 'r') + time_series_file = viz.open_netcdf(file_names[time_index]) prev_file = file_names[time_index] if any_var_has_time_dim: @@ -193,16 +244,27 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, xtime = None years = float(time_index) else: - if xtimeName not in time_series_file.variables: - raise ValueError("xtime variable name {} not found in " - "{}".format(xtimeName, time_series_file)) - var = time_series_file.variables[xtimeName] - xtime = ''.join(var[local_time_indices[time_index], :]).strip() - date = datetime(int(xtime[0:4]), int(xtime[5:7]), - int(xtime[8:10]), int(xtime[11:13]), - int(xtime[14:16]), int(xtime[17:19])) - years = date2num(date, units='days since 0000-01-01', - calendar='noleap')/365. + if xtimeName == 'none': + xtime = '{}'.format(time_index) + years = float(time_index) + else: + if xtimeName not in time_series_file.variables: + raise ValueError("xtime variable name {} not found in " + "{}".format(xtimeName, + time_series_file)) + var = time_series_file.variables[xtimeName] + if len(var.shape) == 2: + xtime = var[local_time_indices[time_index], + :].tostring().decode('utf-8').strip() + date = datetime(int(xtime[0:4]), int(xtime[5:7]), + int(xtime[8:10]), int(xtime[11:13]), + int(xtime[14:16]), int(xtime[17:19])) + years = date2num(date, units='days since 0000-01-01', + calendar='noleap')/365. + else: + xtime = var[local_time_indices[time_index]] + years = xtime/365. 
+ xtime = str(xtime) # write the header for the vtp file vtp_file_prefix = "time_series/{}.{:d}".format(out_prefix, @@ -218,21 +280,21 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, varIndices = np.arange(nVars) else: varIndices = np.arange(nVars)[var_has_time_dim] - timeDependentFile = utils.write_vtp_header(out_dir, - vtp_file_prefix, - varIndices[0], - varIndices, - variable_list, - all_dim_vals, - vertices, - connectivity, - offsets, - nPoints, - nPolygons, - outType, - cellData=True, - pointData=False, - xtime=xtime) + timeDependentFile = viz.write_vtp_header(out_dir, + vtp_file_prefix, + varIndices[0], + varIndices, + variable_list, + all_dim_vals, + vertices, + connectivity, + offsets, + nPoints, + nPolygons, + outType, + cellData=cellData, + pointData=pointData, + xtime=xtime) # add time step to pdv file pvd_file.write('\n') - pvd_file.write('\n') # }}} + pvd_file.write('\n') + pvd_file.close() # }}} if __name__ == "__main__": if use_progress_bar: - print " -- Using progress bars --" + print(" -- Using progress bars --") else: - print " -- Progress bars are not available--" + print(" -- Progress bars are not available--") parser = \ argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawTextHelpFormatter) @@ -343,21 +419,27 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, parser.add_argument("-o", "--out_dir", dest="out_dir", help="the output directory.", default='vtk_files', metavar="DIR") - parser.add_argument("-x", "--xtime", dest="xtime", - help="the name of the xtime variable", default='xtime', - metavar="XTIME") + parser.add_argument("-x", "--xtime", dest="xtime", default='xtime', + metavar="XTIME", + help="the name of the xtime variable or 'none' to " + "extract Time dim without xtime") parser.add_argument("-l", "--lonlat", dest="lonlat", help="If set, the resulting points are in lon-lat " "space, not Cartesian.", action="store_true") parser.add_argument("-t", "--time", dest="time", help="Indices for the time dimension", metavar="TIME", required=False) - parser.add_argument("--ignore_time", dest="ignore_time", + parser.add_argument("--ignore_time", dest="ignore_time", required=False, action="store_true", help="ignore the Time dimension if it exists " "for files with a Time dimension but no xtime" - "variable (e.g. mesh file)", - required=False) + "variable (e.g. 
mesh file)") + parser.add_argument("--topo_dim", dest="topo_dim", required=False, + help="Dimension and range for topography dimension") + parser.add_argument("--topo_cell_index", dest="topo_cell_index", + required=False, + help="Index array indicating the bottom of the domain " + "(default is the topo_dim-1 for all cells)") args = parser.parse_args() if not args.output_32bit: @@ -373,12 +455,12 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if args.ignore_time: args.xtime = None - (time_indices, time_file_names) = utils.setup_time_indices( + (time_indices, time_file_names) = viz.setup_time_indices( args.filename_pattern, args.xtime) if args.time: time_indices, time_file_names = \ - utils.parse_time_indices(args.time, time_indices, time_file_names) + viz.parse_time_indices(args.time, time_indices, time_file_names) separate_mesh_file = True if not args.mesh_filename: @@ -386,33 +468,48 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, separate_mesh_file = False # Setting dimension values: - time_series_file = NetCDFFile(time_file_names[0], 'r') + time_series_file = viz.open_netcdf(time_file_names[0]) if separate_mesh_file: - mesh_file = NetCDFFile(args.mesh_filename, 'r') + mesh_file = viz.open_netcdf(args.mesh_filename) else: mesh_file = None - extra_dims = utils.parse_extra_dims(args.dimension_list, time_series_file, - mesh_file) + extra_dims, topo_cell_indices = \ + viz.parse_extra_dims(args.dimension_list, time_series_file, + mesh_file, topo_dim=args.topo_dim, + topo_cell_index_name=args.topo_cell_index) + basic_dims = ['nCells', 'nEdges', 'nVertices', 'Time'] + include_dims = ['nCells', 'nEdges', 'nVertices'] + if args.topo_dim is not None: + basic_dims.append(args.topo_dim) + include_dims = ['nCells'] + (all_dim_vals, cellVars, vertexVars, edgeVars) = \ - utils.setup_dimension_values_and_sort_vars(time_series_file, mesh_file, - args.variable_list, - extra_dims) + viz.setup_dimension_values_and_sort_vars( + time_series_file, mesh_file, args.variable_list, extra_dims, + basic_dims=basic_dims) time_series_file.close() if(mesh_file is not None): mesh_file.close() - utils.summarize_extraction(args.mesh_filename, time_indices, cellVars, - vertexVars, edgeVars) + viz.summarize_extraction(args.mesh_filename, time_indices, cellVars, + vertexVars, edgeVars) # Handle cell variables if len(cellVars) > 0: - print " -- Extracting cell fields --" + print(" -- Extracting cell fields --") - mesh_file = NetCDFFile(args.mesh_filename, 'r') + mesh_file = viz.open_netcdf(args.mesh_filename) # Build cell geometry - (vertices, connectivity, offsets, valid_mask) = \ - utils.build_cell_geom_lists(mesh_file, use_32bit, args.lonlat) + if args.topo_dim is None: + (vertices, connectivity, offsets, valid_mask) = \ + viz.build_cell_geom_lists(mesh_file, use_32bit, args.lonlat) + cell_to_point_map = None + boundary_mask = None + else: + (vertices, connectivity, offsets, valid_mask, cell_to_point_map, + boundary_mask) = viz.build_topo_point_and_polygon_lists( + mesh_file, use_32bit, args.lonlat) if not separate_mesh_file: mesh_file.close() @@ -422,20 +519,24 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, args.out_dir, args.blocking, all_dim_vals, 'nCells', cellVars, vertices, connectivity, offsets, valid_mask, use_32bit, - args.combine_output, args.append, args.xtime) + args.combine_output, args.append, args.xtime, + topo_dim=args.topo_dim, + topo_cell_indices=topo_cell_indices, + cell_to_point_map=cell_to_point_map, + 
boundary_mask=boundary_mask) if separate_mesh_file: mesh_file.close() - print "" + print("") if len(vertexVars) > 0: - print " -- Extracting vertex fields --" + print(" -- Extracting vertex fields --") - mesh_file = NetCDFFile(args.mesh_filename, 'r') + mesh_file = viz.open_netcdf(args.mesh_filename) # Build vertex geometry (vertices, connectivity, offsets, valid_mask) = \ - utils.build_vertex_geom_lists(mesh_file, use_32bit, args.lonlat) + viz.build_vertex_geom_lists(mesh_file, use_32bit, args.lonlat) if not separate_mesh_file: mesh_file.close() @@ -450,16 +551,16 @@ def build_field_time_series(local_time_indices, file_names, mesh_file, if separate_mesh_file: mesh_file.close() - print "" + print("") if len(edgeVars) > 0: - print " -- Extracting edge fields --" + print(" -- Extracting edge fields --") - mesh_file = NetCDFFile(args.mesh_filename, 'r') + mesh_file = viz.open_netcdf(args.mesh_filename) # Build cell list (vertices, connectivity, offsets, valid_mask) = \ - utils.build_edge_geom_lists(mesh_file, use_32bit, args.lonlat) + viz.build_edge_geom_lists(mesh_file, use_32bit, args.lonlat) if not separate_mesh_file: mesh_file.close() diff --git a/visualization/python/planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py b/visualization/planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py similarity index 100% rename from visualization/python/planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py rename to visualization/planar_grids/convert_mpas_grid_to_regular_grid_netcdf.py diff --git a/visualization/python/planar_grids/plot_mpas_field.py b/visualization/planar_grids/plot_mpas_field.py similarity index 100% rename from visualization/python/planar_grids/plot_mpas_field.py rename to visualization/planar_grids/plot_mpas_field.py diff --git a/visualization/python/planar_grids/plot_mpas_field_xsect.py b/visualization/planar_grids/plot_mpas_field_xsect.py similarity index 100% rename from visualization/python/planar_grids/plot_mpas_field_xsect.py rename to visualization/planar_grids/plot_mpas_field_xsect.py diff --git a/visualization/python/planar_grids/plot_mpas_velocity_on_edges.py b/visualization/planar_grids/plot_mpas_velocity_on_edges.py similarity index 100% rename from visualization/python/planar_grids/plot_mpas_velocity_on_edges.py rename to visualization/planar_grids/plot_mpas_velocity_on_edges.py diff --git a/visualization/python/planar_grids/visualize_blocks.py b/visualization/planar_grids/visualize_blocks.py similarity index 100% rename from visualization/python/planar_grids/visualize_blocks.py rename to visualization/planar_grids/visualize_blocks.py diff --git a/visualization/transport_sections/README b/visualization/transport_sections/README deleted file mode 100644 index 18ca701ea..000000000 --- a/visualization/transport_sections/README +++ /dev/null @@ -1,41 +0,0 @@ -transport_sections README - -This is a matlab tool to find sections that connect two points on the -globe. These sections are a sequence of connected edges, and the -edges and other variables are output as both a netcdf and text files. -The transport can then be measured using this matlab code using output -files, or in MPAS-Ocean during runtime. - -To begin, change the parameters at the top of transport_sections.m. -You will need to change the text strings wd, sim(i).dir, and -sim(i).netcdf_file so that the text string - -[wd '/' sim(i).dir '/' sim(i).netcdf_file ] is the file path, - -where wd is the working directory and dir is the run directory. 
-Details of the section coordinates and variables may be specified in -transport_sections.m. - -The data files only need to contain a small number of variables. -You may need to reduce the file size before copying to a local -machine using: - -ncks -v acc_u, \ -nAccumulate,latVertex,lonVertex,verticesOnEdge,edgesOnVertex,hZLevel,\ -dvEdge \ -file_in.nc file_out.nc - -The matlab scripts will create a new netcdf file named -{your_domain}_section_edge_data.nc. To merge this file with an -existing grid or restart file, use: - -ncks -A -v sectionEdgeIndex,sectionEdgeSign,nEdgesInSection,\ -sectionText,sectionAbbreviation,sectionCoord \ -{your_domain}_section_edge_data.nc your_restart_file.nc - -These matlab tools have been tested for ocean output files, but should -nearly work for other cores as well. A few lines will need to be -changed. - -Mark Petersen, MPAS-Ocean Team, LANL, May 2012 - diff --git a/visualization/transport_sections/compute_transport.m b/visualization/transport_sections/compute_transport.m deleted file mode 100644 index 4315ddf25..000000000 --- a/visualization/transport_sections/compute_transport.m +++ /dev/null @@ -1,99 +0,0 @@ -function tr_total = compute_transport ... - (wd,dir,netcdf_file, ... - sectionEdgeIndex, sectionEdgeSign, ... - nEdgesInSection, sectionData,sectionText,sectionAbbreviation) - -% Load large variables from netcdf file - -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 - -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. -% var_conv_factor multiply each variable by this unit conversion. -% sectionEdgeIndex(maxEdges,nSections) cell index of each section -% nEdgesInSection(nSections) number of cells in each section -% sectionData(nVertLevels,max(nEdgesInSection),nSections,nVars) -% data in each cross-section for each variable -% sectionText a cell array with text describing each section -% sectionAbbreviation an 8-character title for each section - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Compute transport through each section -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** Compute transport: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ]; -ncid = netcdf.open(filename,'nc_nowrite'); - -refLayerThickness = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'refLayerThickness')); -dvEdge = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'dvEdge')); -[dimname,nVertLevels]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertLevels')); -[dimname,nTimeSlices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'Time')); -netcdf.close(ncid) - -nSections = length(nEdgesInSection); -maxNEdgesInSection = max(nEdgesInSection); - -m3ps_to_Sv = 1e-6; % m^3/sec flux to Sverdrups - -% the volume transport -tr = zeros(nVertLevels,maxNEdgesInSection,nSections); -tr_total = zeros(nSections,nTimeSlices); - -for iTime=1:nTimeSlices -header=' '; -data_str=' '; -for iSection = 1:nSections - for i=1:nEdgesInSection(iSection) - iEdge = sectionEdgeIndex(i,iSection); - for k=1:nVertLevels - % Compute transport. - % I am assuming here that sectionData(:,:,:,1) contains avgNormalVelocity - tr(k,i,iSection,iTime) = sectionEdgeSign(i,iSection)... - *sectionData(k,i,iSection,1,iTime)*dvEdge(iEdge)* ... 
- refLayerThickness(k)*m3ps_to_Sv; - tr_total(iSection,iTime) = tr_total(iSection,iTime) + tr(k,i,iSection,iTime); - - % This is edge velocity - %tr(k,i,iSection,iTime) = sectionEdgeSign(i,iSection)*sectionData(k,i,iSection,1,iTime); - end - end - - % Optional, for plotting the flow across a cross-section. - % This plots u on edges, so columns oscillate as edges change - % direction. The best way to view a cross-section is to use the - % uMeridional and uZonal at the cell center. - %figure(iSection+1) - %imagesc(log(abs(tr(:,1:nEdgesInSection(iSection),iSection)))) - %imagesc(tr(:,1:nEdgesInSection(iSection),iSection)) - %colorbar - - % note: flow computed in matlab only matches that computed in - % MPAS-O if they both use refLayerThickness. To do a verification check, - % replace the line - % * h_edge(k,iEdge)*m3ps_to_Sv; - % in mpas_ocn_time_average.F with the line - % * refLayerThickness(k,iEdge)*m3ps_to_Sv; - - temptext = char(sectionText(iSection)); -% fprintf(['Section %3i, ' temptext(1:22) ' observed flow:' ... -% temptext(63:75) ' mpas flow: %20.15f Sv\n'],iSection,tr_total(iSection)) - - header = [header sectionAbbreviation(iSection,:) ' ']; - data_str = [data_str num2str_fixed(tr_total(iSection,iTime),'%4.1f',7)... - ' ']; -end - -if iTime==1 -fprintf(['\n Summary, in Sv: \n' header '\n' ]) -end -fprintf([data_str ' \n']) -end - - -fprintf('\n') - diff --git a/visualization/transport_sections/find_edge_sections.m b/visualization/transport_sections/find_edge_sections.m deleted file mode 100644 index d9297c1df..000000000 --- a/visualization/transport_sections/find_edge_sections.m +++ /dev/null @@ -1,239 +0,0 @@ -function [sectionEdgeIndex, sectionEdgeSign, nEdgesInSection, ... - latSectionVertex,lonSectionVertex, ... - latVertexDeg,lonVertexDeg] = find_edge_sections ... - (wd,dir,netcdf_file,sectionText,sectionCoord) - -% This function reads grid data from an MPAS-Ocean grid or restart -% netCDF file, and finds a path of edges that connect the endpoints -% specified in sectionCoord. The path is forced to travel through edges -% that are closest to the line connecting the beginning and end -% edges. -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% The text string [wd '/' dir '/' netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. -% sectionText a cell array with text describing each section -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% -%%%%%%%%%% output arguments %%%%%%%%% -% sectionEdgeIndex(maxNEdgesInSection,nSections) edge index of each section -% sectionEdgeSign(maxNEdgesInSection,nSections) sign of each -% section, positive is to right of path direction. -% nEdgesInSection(nSections) number of edges in each section -% latSectionVertex(maxNEdgesInSection,nSections) lat coordinates of each section -% lonSectionVertex(maxNEdgesInSection,nSections) lon coordinates of each section -% latVertexDeg(nEdges) lat arrays for all edges -% lonVertexDeg(nEdges) lon arrays for all edges - -%%%%%%%%%% parameters internal to this function %%%%%%%%%% - -% maxEdges specifies the maximum number of Edges attempted along -% the path to the end-edge before stopping with a warning. -maxEdges = 1500; - -% Make sure sectionCoord traverse from south to north, and from east to west. 
-% [startlat startlon endlat endlon] -nSections = size(sectionCoord,1); -for j=1:nSections - latChange = sectionCoord(j,3) - sectionCoord(j,1); - lonChange = sectionCoord(j,4) - sectionCoord(j,2); - if abs(lonChange)>abs(latChange) % zonal section - if lonChange>0 - fprintf(['Warning: Zonal sections should go from east to west. ' ... - 'For section %g start and end longitudes are %g, %g \n'], ... - j,sectionCoord(j,2),sectionCoord(j,4)) - end - else - if latChange<0 - fprintf(['Warning: Meridional sections should go from south to north. ' ... - 'For section %g start and end latitudes are %g, %g \n'], ... - j,sectionCoord(j,1),sectionCoord(j,3)) - end - end - -end - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Read edge and edge data from grid file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf(['** find_edge_sections, simulation: ' dir '\n']) - -filename = [wd '/' dir '/' netcdf_file ]; -ncid = netcdf.open(filename,'nc_nowrite'); - -latVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'latVertex')); -lonVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'lonVertex')); -verticesOnEdge = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'verticesOnEdge')); -edgesOnVertex = netcdf.getVar(ncid,netcdf.inqVarID(ncid,'edgesOnVertex')); -[dimname,nEdges]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nEdges')); -[dimname,nVertices]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'nVertices')); -[dimname,vertexDegree]= netcdf.inqDim(ncid,netcdf.inqDimID(ncid,'vertexDegree')); -netcdf.close(ncid) - -% Grid variables should be: -% lat varies from -pi/2:pi/2 -% lon varies from 0:2*pi -if (min(lonVertex)<-1e-8) - lonVertex = mod(lonVertex,2*pi); -end -% convert to degrees for plotting: -latVertexDeg = latVertex*180/pi; -lonVertexDeg = lonVertex*180/pi; - -sectionVertexIndex = zeros(maxEdges,nSections); -sectionEdgeIndex = zeros(maxEdges,nSections); -sectionEdgeSign = zeros(maxEdges,nSections); -nEdgesInSection = zeros(1,nSections); - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Find edges that connect beginning and ending points -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -for iSection=1:nSections - latCoord = [sectionCoord(iSection,1) sectionCoord(iSection,3)]/180*pi; - lonCoord = [sectionCoord(iSection,2) sectionCoord(iSection,4)]/180*pi; - - % Find vertex closest to start and end coordinates. - % The seed vertex array simply stores start and end index. - minDist = 1e10*ones(1,2); - seedVertexIndex = zeros(1,2); - for iVertex = 1:nVertices - for i=1:2 - dist = sqrt( ... - (lonCoord(i) - lonVertex(iVertex))^2 ... - + (latCoord(i) - latVertex(iVertex))^2); - if (dist0) - % Find the vertex on the other side of iEdge - if (verticesOnEdge(1,iEdge)==sectionVertexIndex(i,iSection)) - iVertex = verticesOnEdge(2,iEdge); - % Going from vertex 1 to vertex 2. Leave positive. - edgeSign = 1; - else - iVertex = verticesOnEdge(1,iEdge); - % Going from vertex 2 to vertex 1. Make negative. - edgeSign = -1; - end - - % I am using lat/lon Cartesian distance. - % This is distance to the final vertex location. - dist = sqrt( ... - (lonVertex(iVertex) - lonVertex(endVertexIndex))^2 ... - + (latVertex(iVertex) - latVertex(endVertexIndex))^2 ); - -%fprintf('%6i %6i %8.4f %8.4f h1=plot(%g,%g); h2=plot(%g,%g); \n',... -%i,j,dist,distLastVertex,... -% lonVertex(iVertex)*180/pi,latVertex(iVertex)*180/pi,... 
-% lonVertex(endVertexIndex)*180/pi,latVertex(endVertexIndex)*180/pi) - % check if this vertex is closer to the end vertex than the - % last vertex. If so, it is a candidate, and we can continue. - if (dist-1e-8) - minLon = 0.0; - latTrans = 360; -else - minLon = -180.0; - latTrans = 0.0; -end - - % plot topo data of the earth. This is just low-rez one deg - % data for visual reference. - load('topo.mat','topo','topomap1'); - if minLon==-180 - topoNew(:,1:180) = topo(:,181:360); - topoNew(:,181:360) = topo(:,1:180); - image([-180 180],[-90 90],topoNew,'CDataMapping', 'scaled'); - else - image([0 360],[-90 90],topo,'CDataMapping', 'scaled'); - end - - colormap(topomap1); - patch([-10 1000 1000 -10 -10],[-100 -100 100 100 -100],[.5 1 0]) - patch([-10 1000 1000 -10 -10],[-100 -100 100 100 -100],[1 1 1]) - set(gca,'YDir','normal') - - hold on - - % world - axis([0 360 -90 90]) - set(gca,'XTick',30*[-10:12]) - set(gca,'YTick',15*[-20:20]) - - % half world -% axis([-240+latTrans 0+latTrans -80 70]) -% set(gca,'XTick',20*[-10:20]) -% set(gca,'YTick',10*[-20:20]) - - % N Atlantic -% axis([-90+latTrans -5+latTrans -5 70]) -% set(gca,'XTick',[-100:5:360]) -% set(gca,'YTick',[-90:5:90]) - - % Drake passage -% axis([-90+latTrans,-50+latTrans,-75,-50]) -% set(gca,'XTick',[-100:2:360]) - % set(gca,'YTick',[-200:2:200]) - - % Pacific -% axis([130 260 -10 10]) -% set(gca,'XTick',[0:1:300]) -% set(gca,'YTick',[-20:.1:20]) - - - % plot vertexs. This is just done for debugging. - h=plot(lonVertexDeg,latVertexDeg,'.b'); - set(h,'MarkerSize',2) - - grid on - - for iSection=1:nSections - latCoordDeg = [sectionCoord(iSection,1) sectionCoord(iSection,3)]; - lonCoordDeg = [sectionCoord(iSection,2) sectionCoord(iSection,4)]; - - %h=plot([mod(lonCoordDeg,360)],[latCoordDeg],'*-'); - %set(h,'Color','y','LineWidth',1) - %h=plot([mod(lonCoordDeg(1),360)],[latCoordDeg(1)],'*k'); - - for i=1:nEdgesInSection(iSection) - h = line([lonSectionVertex(i,iSection) lonSectionVertex(i+1,iSection)],... - [latSectionVertex(i,iSection) latSectionVertex(i+1,iSection)]); - set(h,'Color','r','LineWidth',2) - %plot([lonVertexDeg(sectionVertexIndex(i+1,iSection))], ... - % [latVertexDeg(sectionVertexIndex(i+1,iSection))],'sk') - end - end - - ylabel('latitude') - xlabel('longitude') - title(['Domain: ' regexprep(dir,'_','\\_') ' Edges of transport sections. ']) - - set(gcf,'PaperPositionMode','auto','color',[.8 1 .8], ... - 'PaperPosition',[0.25 0.25 16 8]) - - subplot('position',[0 .95 1 .05]); axis off - text(.005,.7,[ date ]); - - dir_name1 = regexprep(dir,'\.','_'); - dir_name2 = regexprep(dir_name1,'/','_'); - filename=['f/' dir_name2 '_vertex_map' ]; - print('-djpeg',[filename '.jpg']); - - % put printing text in a latex file - fprintf(fid_latex,... - ['\\begin{figure}[btp] \\center \n \\includegraphics[width=7.5in]{'... - filename '.jpg} \n\\end{figure} \n']); diff --git a/visualization/transport_sections/transport_sections.m b/visualization/transport_sections/transport_sections.m deleted file mode 100644 index 2d11d91ec..000000000 --- a/visualization/transport_sections/transport_sections.m +++ /dev/null @@ -1,280 +0,0 @@ -%function transport_sections - -% Specify data files, coordinates and text, then call functions -% to find edge sections, load data, and compute transport through -% each section. 
-% -% This script produces new netcdf files in the subdirectory -% netcdf_files which can then be merged with grid.nc or restart.nc -% files to collect transport data in MPAS-Ocean -% -% To merge the new *_section_edge_data.nc with an existing grid or -% restart file, use: -% ncks -A -v sectionEdgeIndex,sectionEdgeSign,nEdgesInSection,\ -% sectionText,sectionAbbreviation,sectionCoord \ -% your_file_section_edge_data.nc your_restart_file.nc -% -% Mark Petersen, MPAS-Ocean Team, LANL, March 2014 - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify data files -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% all plots are placed in the f directory. Comment out if not needed. -unix('mkdir -p f netcdf_files docs text_files'); - -% The text string [wd '/' sim(i).dir '/' sim(i).netcdf_file ] is the file path, -% where wd is the working directory and dir is the run directory. - -wd = '/var/tmp/mpeterse/runs/'; -dir='m91'; -abc = 'klmnop'; - -for letter=1:length(abc) - -% These files only need to contain a small number of variables. -% You may need to reduce the file size before copying to a local -% machine using: -% ncks -v avgNormalVelocity,avgNormalTransportVelocity,nAverage,latVertex,lonVertex,verticesOnEdge,edgesOnVertex,refLayerThickness,dvEdge \ -% file_in.nc file_out.nc - -clear sim -for j=1:3 - sim(j).dir=[dir abc(letter)]; - sim(j).netcdf_file = ['output.00' num2str_fixed0(16+j,'%g',2) '-02-01_00.00.00.nc_transport_vars.nc']; -end - - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify section coordinates and text -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% sectionText a cell array with text describing each section -sectionText = { -'Drake Passage, S Ocean -56 to -63 lat, 68W lon, section A21, 140+/- 6 Sv in Ganachaud_Wunsch00n and Ganachaud99thesis',... -'Tasmania-Ant, S Ocean -44 to -66 lat, 140E lon, section P12, 157+/-10 Sv in Ganachaud_Wunsch00n and Ganachaud99thesis',... -'Africa-Ant, S Ocean -31.3to -70 lat, 30E lon, section I6, Sv in Ganachaud99thesis ',... -'Antilles Inflow, Carib. -18.4+/-4.7Sv in Johns_ea02dsr '... -'Mona Passage, Caribbian -2.6+/-1.2Sv in Johns_ea02dsr '... -'Windward Passage, Carib -7.0 Sv in Nelepo_ea76sr, Roemmich81jgr '... -'Florida-Cuba, Caribbian 31.5+/-1.5Sv in Johns_ea02dsr, 32.3+/-3.2Sv Larsen92rslpt'... -'Florida-Bahamas, Carib. 27 lat, -80 to -78.8lon, 31.5+/-1.5Sv in Johns_ea02dsr, 32.3+/-3.2Sv Larsen92rslpt'... -'Indonesian Throughflow, -9 to -18 lat, 116E lon, section J89, -16+/- 5 Sv in Ganachaud_Wunsch00n and Ganachaud99thesis',... -'Agulhas -70+/-20 Sv in Bryden_Beal01dsr ',... -'Mozambique Channel, -25 lat, 35 to 44E lon, section I4 , -14+/- 6 Sv in Ganachaud_Wunsch00n and Ganachaud99thesis',... -'Bering Strait, Arctic 0.83+/-0.66Sv in Roach_ea95jgr '... - }; -%'Lancaster Sound, Arctic 0.67+/-0.3Sv in Maltrud_McLean05om '... -%'Fram Strait, Arctic -4.2+/-2.3Sv in Fahrbach_ea01pr '... -%'Robeson Channel, Arctic -0.75+/-0.2Sv in Maltrud_McLean05om '... - -% sectionAbbreviation an 8-character title for each section -sectionAbbreviation = [... - 'Drake Pa';... - 'Tasm-Ant';... - 'Afri-Ant';... - 'Antilles';... - 'Mona Pas';... - 'Wind Pas';... - 'FL-Cuba ';... - 'FL-Baham';... - 'Ind Thru';... - 'Agulhas ';... - 'Mozam Ch';... - 'Bering ';... -]; -% 'Lancastr';... -% 'Fram ';... -% 'Robeson ';... 
- -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] -% Traverse from south to north, and from east to west. -% Then positive velocities are eastward and northward. -sectionCoord = [... - -64.5 -64 -55 -65.3;... % Drake - -67 140 -43.5 147 ;... % Tasm-Ant - -70.0 30 -31.3 30 ;... % Afri-Ant - 10.7 -63.2 18.0 -65.9;... % Antilles - 18.4 -67.2 18.4 -68.5;... % Mona Passage - 19.8 -73.4 20.1 -74.3;... % Windward Passage - 23.1 -81.0 25.15 -81.0;... % Florida-Cuba - 26.52 -78.78 26.7 -80.1;... % Florida-Bahamas - -21 116.0 -8.8 116 ;... % Ind Thru - -32.4 32.0 -31.0 30.2;... % Agulhas - -25 44.0 -25.0 34 ;... % Mozam Ch - 65.8 -167.7 66.1 -169.7;... % Bering St - ]; -% 73.7 -80.6 74.6 -81.0;... % Lancaster Sound- was not able to -% get this to connect for all resolutions -% 79.7 10.7 79.7 -17.7;... % Fram St - crosses 0 lon. This is not in code yet. -% 81.0 -63.5 82.0 -63.5;... % Robeson Ch - was not able to get this to connect - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify variables -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -% var_name(nVars) a cell array with text for each variable to -% load or compute. -% var_conv_factor multiply each variable by this unit conversion. -% var_lims(nVars,3) contour line definition: min, max, interval - -% Eulerian velocity from prognostic momentum equation -var_name = {'avgNormalVelocity'}; -% total transport velocity -%var_name = {'avgNormalTransportVelocity'} - -var_conv_factor = [1 1 1]; % No conversion here. - -var_lims = [-10 10 2.5; -10 10 2.5; 0 20 2.5]; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Specify actions to be taken -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -find_edge_sections_flag = true ; -write_edge_sections_text_flag = false ; -write_edge_sections_netcdf_flag = false ; -plot_edge_sections_flag = true ; -compute_transport_flag = true ; - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Begin main code. Normally this does not need to change. -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%close all - -% change the coordinate range to be 0 to 360. -sectionCoord(:,2) = mod(sectionCoord(:,2),360); -sectionCoord(:,4) = mod(sectionCoord(:,4),360); - -for iSim = 1:length(sim) - - fprintf(['**** simulation: ' sim(iSim).dir ' ' sim(iSim).netcdf_file '\n']) - unix(['mkdir -p docs/' sim(iSim).netcdf_file '_dir/f']); - fid_latex = fopen('temp.tex','w'); - fprintf(fid_latex,['%% file created by plot_mpas_cross_sections, ' date '\n\n']); - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Find edges that connect beginning and end points of section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if find_edge_sections_flag - [sim(iSim).sectionEdgeIndex, sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sim(iSim).latSectionVertex,sim(iSim).lonSectionVertex, ... - sim(iSim).latVertexDeg,sim(iSim).lonVertexDeg] ... - = find_edge_sections(wd,sim(iSim).dir,sim(iSim).netcdf_file, ... - sectionText,sectionCoord); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Write section edge information to a netcdf file - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if write_edge_sections_text_flag - write_edge_sections_text... 
- (sim(iSim).dir, sim(iSim).sectionEdgeIndex, ... - sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sectionText,sectionAbbreviation,sectionCoord) - end - - if write_edge_sections_netcdf_flag - write_edge_sections_netcdf... - (sim(iSim).dir, sim(iSim).sectionEdgeIndex, ... - sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sectionText,sectionAbbreviation,sectionCoord) - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Plot edge section locations on world map - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if plot_edge_sections_flag - sub_plot_edge_sections(sim(iSim).dir,sectionCoord, ... - sim(iSim).latSectionVertex,sim(iSim).lonSectionVertex, ... - sim(iSim).latVertexDeg,sim(iSim).lonVertexDeg, ... - sim(iSim).sectionEdgeIndex, sim(iSim).nEdgesInSection,... - fid_latex); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Load large variables from netcdf file - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_transport_flag - [sim(iSim).sectionData] = load_large_variables_edge ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, var_name,var_conv_factor, ... - sim(iSim).sectionEdgeIndex, sim(iSim).nEdgesInSection); - end - - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - % - % Compute transport through each section - % - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if compute_transport_flag - sim(iSim).tr_total = compute_transport ... - (wd,sim(iSim).dir,sim(iSim).netcdf_file, ... - sim(iSim).sectionEdgeIndex, sim(iSim).sectionEdgeSign, sim(iSim).nEdgesInSection, ... - sim(iSim).sectionData,sectionText,sectionAbbreviation); - - if iSim==1 - tr_total = sim(iSim).tr_total'; - else - tr_total = [tr_total; sim(iSim).tr_total']; - end - - end - - fclose(fid_latex); - -end % iSim - -% tr_total -mean_transport = mean(tr_total,1); -%fprintf(['mean over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',mean_transport) -fprintf([' mean, ' sim(1).dir ' \n']) - -var_transport = var(tr_total,1); -%fprintf(['variance over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',var_transport) -fprintf([' var, ' sim(1).dir ' \n']) - -std_transport = std(tr_total,1); -%fprintf(['stdev over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',std_transport) -fprintf([' std, ' sim(1).dir ' \n']) - -min_transport = min(tr_total,[],1); -%fprintf(['minimum over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',min_transport) -fprintf([' min, ' sim(1).dir ' \n']) - -max_transport = max(tr_total,[],1); -%fprintf(['maximum over time, ' sim(1).dir ' \n' ]) -fprintf('%10.2f',max_transport) -fprintf([' max, ' sim(1).dir ' \n']) - -filename = ['data/' sim(1).dir '_' char(var_name) '_small_data_file.mat'] -clear sim -save(filename) - -end % letter diff --git a/visualization/transport_sections/write_edge_sections_netcdf.m b/visualization/transport_sections/write_edge_sections_netcdf.m deleted file mode 100644 index acdec2a46..000000000 --- a/visualization/transport_sections/write_edge_sections_netcdf.m +++ /dev/null @@ -1,76 +0,0 @@ -function write_edge_sections_netcdf ... - (dir, ... - sectionEdgeIndex, sectionEdgeSign, nEdgesInSection,... 
- sectionText,sectionAbbreviation,sectionCoord) - -% Write section edge information to the netcdf file -% netcdf_files/your_dir_transport_section_edges.nc -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% dir string with run directory name -% sectionEdgeIndex(maxNEdgesInSection,nSections) edge index of each section -% sectionEdgeSign(maxNEdgesInSection,nSections) sign of each -% section, positive is to right of path direction. -% nEdgesInSection(nSections) number of cells in each section -% sectionText a cell array with text describing each section -% sectionAbbreviation an 8-character title for each section -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Write section edge information to a netcdf file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** Write edge information to file: ' dir '\n']) - -nSections = length(nEdgesInSection); -maxNEdgesInSection = max(nEdgesInSection); - -dir_name1 = regexprep(dir,'/','_'); -filename = ['netcdf_files/' dir_name1 '_transport_section_edges.nc']; -ncid = netcdf.create(filename,'nc_clobber'); - -% Define the dimensions of the variable. -dimid_nSections = netcdf.defDim(ncid,'nSections',nSections); -dimid_maxNEdgesInSection = netcdf.defDim(ncid,'maxNEdgesInSection',maxNEdgesInSection); -dimid_latLonPairs = netcdf.defDim(ncid,'latLonPairs',4); -dimid_CharLength8 = netcdf.defDim(ncid,'CharLength8',8); -dimid_CharLength120 = netcdf.defDim(ncid,'CharLength120',120); - -% Define a new variable in the file. -sectionEdgeIndex_varID = netcdf.defVar(ncid,'sectionEdgeIndex',... - 'int',[dimid_maxNEdgesInSection dimid_nSections]); - -sectionEdgeSign_varID = netcdf.defVar(ncid,'sectionEdgeSign',... - 'int',[dimid_maxNEdgesInSection dimid_nSections]); - -nEdgesInSection_varID = netcdf.defVar(ncid,'nEdgesInSection',... - 'int', [dimid_nSections]); - -sectionText_varID = netcdf.defVar(ncid,'sectionText',... - 'char',[dimid_CharLength120 dimid_nSections]); -sectionAbbreviation_varID = netcdf.defVar(ncid,'sectionAbbreviation',... - 'char',[dimid_CharLength8 dimid_nSections]); -sectionCoord_varID = netcdf.defVar(ncid,'sectionCoord',... - 'double',[dimid_latLonPairs dimid_nSections]); - - -% Leave define mode and enter data mode to write data. -netcdf.endDef(ncid) - -% Write data to variable. -netcdf.putVar(ncid,sectionEdgeIndex_varID,sectionEdgeIndex); -netcdf.putVar(ncid,sectionEdgeSign_varID,sectionEdgeSign); -netcdf.putVar(ncid,nEdgesInSection_varID,nEdgesInSection); -netcdf.putVar(ncid,sectionText_varID,char(sectionText)'); -netcdf.putVar(ncid,sectionAbbreviation_varID,sectionAbbreviation'); -netcdf.putVar(ncid,sectionCoord_varID,sectionCoord); - -netcdf.close(ncid) - -fprintf('\n') - diff --git a/visualization/transport_sections/write_edge_sections_text.m b/visualization/transport_sections/write_edge_sections_text.m deleted file mode 100644 index 8141ec4fd..000000000 --- a/visualization/transport_sections/write_edge_sections_text.m +++ /dev/null @@ -1,104 +0,0 @@ -function write_edge_sections_text ... - (dir, ... - sectionEdgeIndex, sectionEdgeSign, nEdgesInSection,... 
- sectionText,sectionAbbreviation,sectionCoord) - -% Write section edge information to the text file -% text_files/your_dir_transport_section_edges.nc -% -% Mark Petersen, MPAS-Ocean Team, LANL, May 2012 -% -%%%%%%%%%% input arguments %%%%%%%%% -% dir string with run directory name -% sectionEdgeIndex(maxNEdgesInSection,nSections) edge index of each section -% sectionEdgeSign(maxNEdgesInSection,nSections) sign of each -% section, positive is to right of path direction. -% nEdgesInSection(nSections) number of cells in each section -% sectionText a cell array with text describing each section -% sectionAbbreviation an 8-character title for each section -% sectionCoord(nSections,4) endpoints of sections, with one section per row as -% [startlat startlon endlat endlon] - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% -% Write section edge information to a text file -% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -fprintf('\n') -fprintf(['** Write edge information to file: ' dir '\n']) - -nSections = length(nEdgesInSection); -maxNEdgesInSection = max(nEdgesInSection); - -dir_name1 = regexprep(dir,'/','_'); -unix(['mkdir -p text_files/' dir_name1 ]); - -% sectionEdgeIndex -filename = ['text_files/' dir_name1 '/sectionEdgeIndex.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10i',sectionEdgeIndex(:,j)); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionEdgeIndex -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionEdgeIndex.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10i',sectionEdgeIndex(:,j)); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionEdgeSign -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionEdgeSign.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10i',sectionEdgeSign(:,j)); - fprintf(fid,' \n'); -end -fclose(fid); - -% nEdgesInSection -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/nEdgesInSection.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10i',nEdgesInSection(j)); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionText -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionText.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %s',char(sectionText(j))); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionAbbreviation -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionAbbreviation.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %s',sectionAbbreviation(j,:)); - fprintf(fid,' \n'); -end -fclose(fid); - -% sectionCoord -dir_name1 = regexprep(dir,'/','_'); -filename = ['text_files/' dir_name1 '/sectionCoord.txt']; -fid = fopen(filename,'w'); -for j=1:nSections - fprintf(fid,' %10.3f',sectionCoord(j,:)); - fprintf(fid,' \n'); -end -fclose(fid); -