4 changes: 2 additions & 2 deletions examples/meteogram_metpy.py
@@ -81,7 +81,7 @@ def plot_winds(self, ws, wd, wsmax, plot_range=None):
ax7.set_ylim(0, 360)
ax7.set_yticks(np.arange(45, 405, 90), ['NE', 'SE', 'SW', 'NW'])
lns = ln1 + ln2 + ln3
labs = [l.get_label() for l in lns]
labs = [ln.get_label() for ln in lns]
ax7.xaxis.set_major_formatter(mpl.dates.DateFormatter('%d/%H UTC'))
ax7.legend(lns, labs, loc='upper center',
bbox_to_anchor=(0.5, 1.2), ncol=3, prop={'size': 12})
@@ -112,7 +112,7 @@ def plot_thermo(self, t, td, plot_range=None):
ax_twin = self.ax2.twinx()
ax_twin.set_ylim(plot_range[0], plot_range[1], plot_range[2])
lns = ln4 + ln5
labs = [l.get_label() for l in lns]
labs = [ln.get_label() for ln in lns]
ax_twin.xaxis.set_major_formatter(mpl.dates.DateFormatter('%d/%H UTC'))

self.ax2.legend(lns, labs, loc='upper center',
2 changes: 1 addition & 1 deletion src/metpy/__init__.py
@@ -32,6 +32,6 @@
os.environ['PINT_ARRAY_PROTOCOL_FALLBACK'] = '0'

from ._version import get_version # noqa: E402
from .xarray import * # noqa: F401, F403
from .xarray import * # noqa: F401, F403, E402
__version__ = get_version()
del get_version
4 changes: 2 additions & 2 deletions src/metpy/calc/thermo.py
@@ -982,7 +982,7 @@ def equivalent_potential_temperature(pressure, temperature, dewpoint):
th_l = t * (1000 / (p - e)) ** mpconsts.kappa * (t / t_l) ** (0.28 * r)
th_e = th_l * np.exp((3036. / t_l - 1.78) * r * (1 + 0.448 * r))

return th_e * units.kelvin
return units.Quantity(th_e, units.kelvin)


@exporter.export
@@ -1049,7 +1049,7 @@ def saturation_equivalent_potential_temperature(pressure, temperature):
th_l = t * (1000 / (p - e)) ** mpconsts.kappa
th_es = th_l * np.exp((3036. / t - 1.78) * r * (1 + 0.448 * r))

return th_es * units.kelvin
return units.Quantity(th_es, units.kelvin)


@exporter.export
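
The change from `th_e * units.kelvin` to `units.Quantity(th_e, units.kelvin)` is what lets masked input survive these calculations (see the new tests in tests/calc/test_thermo.py below). A minimal sketch of the distinction, under the assumption that multiplying a masked array by a unit can be handled by numpy's machinery first and lose the mask (the exact behavior depends on the installed numpy/pint versions):

import numpy as np

from metpy.units import units

dewpoint = np.ma.array([280., 281., 282.], mask=[False, True, False])

# Wrapping explicitly keeps the masked array as the Quantity's magnitude
q = units.Quantity(dewpoint, units.kelvin)
print(type(q.magnitude), q.magnitude.mask)  # MaskedArray, mask intact

# Multiplying by a unit may not preserve the mask, depending on versions
q2 = dewpoint * units.kelvin
print(type(q2))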
6 changes: 6 additions & 0 deletions src/metpy/plots/_mpl.py
@@ -100,6 +100,12 @@ def scattertext(self, x, y, texts, loc=(0, 0), **kw):

# Add it to the axes and update range
self.add_artist(text_obj)

# Matplotlib, at least up to 3.2.2, does not properly clip text with paths, so
# work around this by setting the clip box to the bounding box of the Axes
# TODO: Remove when fixed in our minimum supported version of matplotlib
text_obj.clipbox = self.bbox

self.update_datalim(text_obj.get_datalim(self.transData))
self.autoscale_view()
return text_obj
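
The clipping work-around added above follows the generic matplotlib pattern of clipping an artist to the Axes bounding box. A rough standalone illustration (hypothetical, not the scattertext code path itself):

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.set_xlim(0, 1)

# Text placed beyond the axes limits; on affected matplotlib releases,
# clipping text drawn with paths does not fully hide it
txt = ax.text(1.5, 0.5, 'should be clipped', clip_on=True)

# Equivalent of `text_obj.clipbox = self.bbox` above: clip to the Axes bbox
txt.set_clip_box(ax.bbox)

plt.show()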
21 changes: 16 additions & 5 deletions src/metpy/plots/cartopy_utils.py
@@ -3,28 +3,39 @@
# SPDX-License-Identifier: BSD-3-Clause
"""Cartopy specific mapping utilities."""

import cartopy.crs as ccrs
import cartopy.feature as cfeature

from ..cbook import get_test_data


class MetPyMapFeature(cfeature.NaturalEarthFeature):
"""A simple interface to US County shapefiles."""
class MetPyMapFeature(cfeature.Feature):
"""A simple interface to MetPy-included shapefiles."""

def __init__(self, name, scale, **kwargs):
"""Create USCountiesFeature instance."""
super().__init__('', name, scale, **kwargs)
"""Create MetPyMapFeature instance."""
super().__init__(ccrs.PlateCarree(), **kwargs)
self.name = name

if isinstance(scale, str):
scale = cfeature.Scaler(scale)
self.scaler = scale

def geometries(self):
"""Return an iterator of (shapely) geometries for this feature."""
import cartopy.io.shapereader as shapereader
# Ensure that the associated files are in the cache
fname = '{}_{}'.format(self.name, self.scale)
fname = '{}_{}'.format(self.name, self.scaler.scale)
for extension in ['.dbf', '.shx']:
get_test_data(fname + extension)
path = get_test_data(fname + '.shp', as_file_obj=False)
return iter(tuple(shapereader.Reader(path).geometries()))

def intersecting_geometries(self, extent):
"""Return geometries that intersect the extent."""
self.scaler.scale_from_extent(extent)
return super().intersecting_geometries(extent)

def with_scale(self, new_scale):
"""
Return a copy of the feature with a new scale.
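
As a usage sketch (assuming the packaged county outlines, `metpy.plots.USCOUNTIES`, are built on this class and that '20m', '5m', and '500k' remain the available scales): the `intersecting_geometries` hook above lets a feature built with an adaptive scaler choose its resolution from the map extent, while `with_scale` pins a specific resolution.

import cartopy.crs as ccrs
import matplotlib.pyplot as plt

from metpy.plots import USCOUNTIES

fig = plt.figure(figsize=(8, 6))
ax = fig.add_subplot(1, 1, 1, projection=ccrs.LambertConformal())
ax.set_extent([-105, -93, 35, 43], crs=ccrs.PlateCarree())

# Explicitly request the medium-resolution county outlines
ax.add_feature(USCOUNTIES.with_scale('5m'), edgecolor='gray', linewidth=0.5)

plt.show()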
4 changes: 2 additions & 2 deletions src/metpy/plots/wx_symbols.py
@@ -106,13 +106,13 @@ def __init__(self, num, font_start, font_jumps=None, char_jumps=None):
font_point += 1

@staticmethod
def _safe_pop(l):
def _safe_pop(li):
"""Safely pop from a list.

Returns None if list empty.

"""
return l.pop(0) if l else None
return li.pop(0) if li else None

def __call__(self, code):
"""Return the Unicode code point corresponding to `code`."""
121 changes: 93 additions & 28 deletions src/metpy/xarray.py
@@ -140,26 +140,52 @@ def magnitude(self):

@property
def unit_array(self):
"""Return the data values of this DataArray as a `pint.Quantity`."""
"""Return the data values of this DataArray as a `pint.Quantity`.

Notes
-----
If not already existing as a `pint.Quantity` or Dask array, the data of this DataArray
will be loaded into memory by this operation.
"""
if isinstance(self._data_array.data, units.Quantity):
return self._data_array.data
else:
return units.Quantity(self._data_array.values, self.units)
return units.Quantity(self._data_array.data, self.units)

def convert_units(self, units):
"""Return new DataArray with values converted to different units."""
"""Return new DataArray with values converted to different units.

Notes
-----
Any cached/lazy-loaded data (except that in a Dask array) will be loaded into memory
by this operation. Do not utilize on moderate- to large-sized remote datasets before
subsetting!
"""
return self.quantify().copy(data=self.unit_array.to(units))

def convert_coordinate_units(self, coord, units):
"""Return new DataArray with coordinate converted to different units."""
"""Return new DataArray with coordinate converted to different units.

Notes
-----
Any cached/lazy-loaded coordinate data (except that in a Dask array) will be loaded
into memory by this operation.
"""
new_coord_var = self._data_array[coord].copy(
data=self._data_array[coord].metpy.unit_array.m_as(units)
)
new_coord_var.attrs['units'] = str(units)
return self._data_array.assign_coords(coords={coord: new_coord_var})

def quantify(self):
"""Return a DataArray with the data converted to a `pint.Quantity`."""
"""Return a DataArray with the data converted to a `pint.Quantity`.

Notes
-----
Any cached/lazy-loaded data (except that in a Dask array) will be loaded into memory
by this operation. Do not utilize on moderate- to large-sized remote datasets before
subsetting!
"""
if (
not isinstance(self._data_array.data, units.Quantity)
and np.issubdtype(self._data_array.data.dtype, np.number)
@@ -203,14 +229,17 @@ def cartopy_globe(self):

def _fixup_coordinate_map(self, coord_map):
"""Ensure sure we have coordinate variables in map, not coordinate names."""
new_coord_map = {}
for axis in coord_map:
if coord_map[axis] is not None and not isinstance(coord_map[axis], xr.DataArray):
coord_map[axis] = self._data_array[coord_map[axis]]
new_coord_map[axis] = self._data_array[coord_map[axis]]
else:
new_coord_map[axis] = coord_map[axis]

return coord_map
return new_coord_map

def assign_coordinates(self, coordinates):
"""Assign the given coordinates to the given MetPy axis types.
"""Return new DataArray with given coordinates assigned to the given MetPy axis types.

Parameters
----------
@@ -221,18 +250,32 @@ def assign_coordinates(self, coordinates):
which will trigger reparsing of all coordinates on next access.

"""
coord_updates = {}
if coordinates:
# Assign the _metpy_axis attributes according to supplied mapping
coordinates = self._fixup_coordinate_map(coordinates)
for axis in coordinates:
if coordinates[axis] is not None:
_assign_axis(coordinates[axis].attrs, axis)
coord_updates[coordinates[axis].name] = (
coordinates[axis].assign_attrs(
_assign_axis(coordinates[axis].attrs.copy(), axis)
)
)
else:
# Clear _metpy_axis attribute on all coordinates
for coord_var in self._data_array.coords.values():
coord_var.attrs.pop('_metpy_axis', None)
for coord_name, coord_var in self._data_array.coords.items():
coord_updates[coord_name] = coord_var.copy(deep=False)

return self._data_array # allow method chaining
# Some coordinates remain linked in their old form under other coordinates, so
# we need to remove the attribute from those as well.
sub_coords = coord_updates[coord_name].coords
for sub_coord in sub_coords:
coord_updates[coord_name].coords[sub_coord].attrs.pop('_metpy_axis', None)

# Now we can remove the _metpy_axis attr from the coordinate itself
coord_updates[coord_name].attrs.pop('_metpy_axis', None)

return self._data_array.assign_coords(coord_updates)

def _generate_coordinate_map(self):
"""Generate a coordinate map via CF conventions and other methods."""
@@ -291,6 +334,11 @@ def _metpy_axis_search(self, metpy_axis):
return coord_var

# Opportunistically parse all coordinates, and assign if not already assigned
# Note: since this is generally called by way of the coordinate properties, caching
# the coordinate parsing results from coord_map on the coordinates means modifying the
# DataArray in-place (an exception to the usual behavior of MetPy's accessor). This is
# considered safe because it only affects the "_metpy_axis" attribute on the
# coordinates, and nothing else.
coord_map = self._generate_coordinate_map()
for axis, coord_var in coord_map.items():
if (coord_var is not None
@@ -625,7 +673,7 @@ def parse_cf(self, varname=None, coordinates=None):

# Assign coordinates if the coordinates argument is given
if coordinates is not None:
var.metpy.assign_coordinates(coordinates)
var = var.metpy.assign_coordinates(coordinates)

# Attempt to build the crs coordinate
crs = None
@@ -658,7 +706,7 @@ def _has_coord(coord_type):
var = self._rebuild_coords(var, crs)
if crs is not None:
var = var.assign_coords(coords={'crs': crs})
return var.metpy.quantify()
return var

def _rebuild_coords(self, var, crs):
"""Clean up the units on the coordinate variables."""
@@ -814,7 +862,7 @@ def assign_y_x(self, force=False, tolerance=None):
return self._dataset.assign_coords(**{y.name: y, x.name: x})

def update_attribute(self, attribute, mapping):
"""Update attribute of all Dataset variables.
"""Return new Dataset with specified attribute updated on all Dataset variables.

Parameters
----------
@@ -829,24 +877,41 @@ def assign_y_x(self, force=False, tolerance=None):
Returns
-------
`xarray.Dataset`
Dataset with attribute updated (modified in place, and returned to allow method
chaining)
New Dataset with attribute updated

"""
# Make mapping uniform
if callable(mapping):
mapping_func = mapping
else:
def mapping_func(varname, **kwargs):
return mapping.get(varname, None)
if not callable(mapping):
old_mapping = mapping

def mapping(varname, **kwargs):
return old_mapping.get(varname, None)

# Apply across all variables
for varname in list(self._dataset.data_vars) + list(self._dataset.coords):
value = mapping_func(varname, **self._dataset[varname].attrs)
if value is not None:
self._dataset[varname].attrs[attribute] = value
# Define mapping function for Dataset.map
def mapping_func(da):
new_value = mapping(da.name, **da.attrs)
if new_value is None:
return da
else:
return da.assign_attrs(**{attribute: new_value})

# Apply across all variables and coordinates
return (
self._dataset
.map(mapping_func, keep_attrs=True)
.assign_coords({
coord_name: mapping_func(coord_var)
for coord_name, coord_var in self._dataset.coords.items()
})
)

return self._dataset
def quantify(self):
"""Return new dataset with all numeric variables quantified and cached data loaded."""
return self._dataset.map(lambda da: da.metpy.quantify(), keep_attrs=True)

def dequantify(self):
"""Return new dataset with variables cast to magnitude and units on attribute."""
return self._dataset.map(lambda da: da.metpy.dequantify(), keep_attrs=True)


def _assign_axis(attributes, axis):
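
The accessor methods above now return new xarray objects instead of modifying in place, so they compose with method chaining (note the corresponding `var = var.metpy.assign_coordinates(...)` change in parse_cf). A small end-to-end sketch; the variable names and attribute values are made up for illustration:

import numpy as np
import xarray as xr

import metpy.xarray  # noqa: F401  (registers the .metpy accessors)

ds = xr.Dataset(
    {'temperature': (('lat', 'lon'), 290. + np.random.rand(3, 4), {'units': 'kelvin'})},
    coords={'lat': ('lat', [30., 40., 50.], {'units': 'degrees_north'}),
            'lon': ('lon', [-110., -100., -90., -80.], {'units': 'degrees_east'})}
)

# Attach a long_name to matching variables/coordinates; a new Dataset is returned
ds = ds.metpy.update_attribute(
    'long_name',
    {'temperature': 'Air temperature', 'lat': 'latitude', 'lon': 'longitude'}
)

# quantify() wraps numeric variables in pint Quantities; dequantify() reverses it
ds_q = ds.metpy.quantify()
print(ds_q['temperature'].data.units)

# DataArray-level conversion also returns a new object
temp_c = ds_q['temperature'].metpy.convert_units('degC')
print(temp_c.metpy.unit_array.units)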
32 changes: 32 additions & 0 deletions tests/calc/test_thermo.py
@@ -476,6 +476,24 @@ def test_equivalent_potential_temperature():
assert_almost_equal(ept, 311.18586467284007 * units.kelvin, 3)


def test_equivalent_potential_temperature_masked():
"""Test equivalent potential temperature calculation with masked arrays."""
p = 1000 * units.mbar
t = units.Quantity(np.ma.array([293., 294., 295.]), units.kelvin)
td = units.Quantity(
np.ma.array([280., 281., 282.], mask=[False, True, False]),
units.kelvin
)
ept = equivalent_potential_temperature(p, t, td)
expected = units.Quantity(
np.ma.array([311.18586, 313.51781, 315.93971], mask=[False, True, False]),
units.kelvin
)
assert isinstance(ept, units.Quantity)
assert isinstance(ept.m, np.ma.MaskedArray)
assert_array_almost_equal(ept, expected, 3)


def test_saturation_equivalent_potential_temperature():
"""Test saturation equivalent potential temperature calculation."""
p = 700 * units.mbar
@@ -486,6 +504,20 @@ def test_saturation_equivalent_potential_temperature():
assert_almost_equal(s_ept, 299.096584 * units.kelvin, 3)


def test_saturation_equivalent_potential_temperature_masked():
"""Test saturation equivalent potential temperature calculation with masked arrays."""
p = 1000 * units.mbar
t = units.Quantity(np.ma.array([293., 294., 295.]), units.kelvin)
s_ept = saturation_equivalent_potential_temperature(p, t)
expected = units.Quantity(
np.ma.array([335.02750, 338.95813, 343.08740]),
units.kelvin
)
assert isinstance(s_ept, units.Quantity)
assert isinstance(s_ept.m, np.ma.MaskedArray)
assert_array_almost_equal(s_ept, expected, 3)


def test_virtual_temperature():
"""Test virtual temperature calculation."""
t = 288. * units.kelvin
Binary file modified tests/plots/baseline/test_arrow_projection.png
Binary file modified tests/plots/baseline/test_barb_projection.png
Binary file modified tests/plots/baseline/test_colorfill.png
Binary file modified tests/plots/baseline/test_colorfill_horiz_colorbar.png
Binary file modified tests/plots/baseline/test_colorfill_no_colorbar.png
Binary file modified tests/plots/baseline/test_declarative_contour_options.png
5 changes: 3 additions & 2 deletions tests/plots/test_declarative.py
@@ -542,7 +542,7 @@ def test_declarative_sfc_obs_changes():
return pc.figure


@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0)
@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.00586)
def test_declarative_colored_barbs():
"""Test making a surface plot with a colored barb (gh-1274)."""
data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False),
@@ -575,7 +575,8 @@ def test_declarative_colored_barbs():


@pytest.mark.mpl_image_compare(remove_text=True,
tolerance={'3.1': 9.771, '2.1': 9.771}.get(MPL_VERSION, 0.))
tolerance={'3.1': 9.771,
'2.1': 9.771}.get(MPL_VERSION, 0.00651))
def test_declarative_sfc_obs_full():
"""Test making a full surface observation plot."""
data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False),