85 changes: 77 additions & 8 deletions lib/iris/fileformats/pp_load_rules.py
@@ -1,4 +1,4 @@
# (C) British Crown Copyright 2013 - 2018, Met Office
# (C) British Crown Copyright 2013 - 2019, Met Office
#
# This file is part of Iris.
#
@@ -30,7 +30,6 @@
from iris.coords import AuxCoord, CellMethod, DimCoord
from iris.fileformats.rules import (ConversionMetadata, Factory, Reference,
                                    ReferenceTarget)
import iris.fileformats.pp
from iris.fileformats._pp_lbproc_pairs import LBPROC_MAP
from iris.fileformats.um_cf_map import (LBFC_TO_CF, STASH_TO_CF,
                                        STASHCODE_IMPLIED_HEIGHTS)
@@ -444,6 +443,81 @@ def _new_coord_and_dims(is_vector_operation,
_HOURS_UNIT = cf_units.Unit('hours')


def _epoch_date_hours(epoch_hours_unit, datetime):
    """
    Return an 'hours since epoch' number for a date.

    Args:

    * epoch_hours_unit (:class:`cf_units.Unit`):
        Unit defining the calendar and zero-time of the conversion.

    * datetime (:class:`datetime.datetime`-like):
        Date object containing year / month / day attributes.

    This routine can also handle dates with a zero year, month or day: such
    dates were valid inputs to 'date2num' up to cftime version 1.0.1, but are
    now illegal.  This routine interprets any zeros as being "1 year/month/day
    before a year/month/day of 1", which produces results consistent with the
    "old" cftime behaviour.

    """
    days_offset = None
    if datetime.year == 0 or datetime.month == 0 or datetime.day == 0:
        # cftime > 1.0.1 no longer allows non-calendar dates.
        # Add 1 to year/month/day, to get a valid date, and adjust the result
        # according to the actual epoch and calendar. This reproduces 'old'
        # results that were produced with cftime <= 1.0.1.
        days_offset = 0
        y, m, d = datetime.year, datetime.month, datetime.day
        calendar = epoch_hours_unit.calendar
        if d == 0:
            # Add one day, by changing day=0 to 1.
            d = 1
            days_offset += 1
        if m == 0:
            # Add a 'January', by changing month=0 to 1.
            m = 1
            if calendar == cf_units.CALENDAR_GREGORIAN:
                days_offset += 31
            elif calendar == cf_units.CALENDAR_360_DAY:
                days_offset += 30
            elif calendar == cf_units.CALENDAR_365_DAY:
                days_offset += 31
            else:
                msg = 'unrecognised calendar : {}'
                raise ValueError(msg.format(calendar))

        if y == 0:
            # Add a 'Year 0', by changing year=0 to 1.
            y = 1
            if calendar == cf_units.CALENDAR_GREGORIAN:
                days_in_year_0 = 366
            elif calendar == cf_units.CALENDAR_360_DAY:
                days_in_year_0 = 360
            elif calendar == cf_units.CALENDAR_365_DAY:
                days_in_year_0 = 365
            else:
                msg = 'unrecognised calendar : {}'
                raise ValueError(msg.format(calendar))

            days_offset += days_in_year_0

        # Replace y/m/d with a modified date, that cftime will accept.
        datetime = datetime.replace(year=y, month=m, day=d)

    # netcdf4python has changed its behaviour, at version 1.2, such
    # that a date2num calculation returns a python float, not
    # numpy.float64. The behaviour of round is to recast this to an
    # int, which is not the desired behaviour for PP files.
    # So, cast the answer to numpy.float_ to be safe.
    epoch_hours = np.float_(epoch_hours_unit.date2num(datetime))

    if days_offset is not None:
        # Correct for any modifications to achieve a valid date.
        epoch_hours -= 24.0 * days_offset

    return epoch_hours
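As an illustration of the zero-date handling described in the docstring above, here is a minimal standalone sketch (not part of the change; the example dates and variable names are ours) of the equivalent behaviour using only public cf_units calls:

    import datetime
    import cf_units
    import numpy as np

    # A typical PP epoch unit: hours since 1970, Gregorian calendar.
    unit = cf_units.Unit('hours since 1970-01-01 00:00:00',
                         calendar=cf_units.CALENDAR_GREGORIAN)

    # An ordinary date converts directly.
    hours = np.float_(unit.date2num(datetime.datetime(1990, 1, 3)))
    print(hours)  # 175368.0  (i.e. 7307 days * 24)

    # A date with day == 0 is rejected by newer cftime, so the routine
    # converts the nearest valid date (day=1) and subtracts the one-day
    # offset, behaving as if "1990-01-00" were 24 hours before 1990-01-01:
    hours_day0 = np.float_(unit.date2num(datetime.datetime(1990, 1, 1)))
    hours_day0 -= 24.0 * 1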


def _convert_time_coords(lbcode, lbtim, epoch_hours_unit,
                         t1, t2, lbft,
                         t1_dims=(), t2_dims=(), lbft_dims=()):
@@ -481,12 +555,7 @@ def _convert_time_coords(lbcode, lbtim, epoch_hours_unit,

"""
    def date2hours(t):
        # netcdf4python has changed its behaviour, at version 1.2, such
        # that a date2num calculation returns a python float, not
        # numpy.float64. The behaviour of round is to recast this to an
        # int, which is not the desired behaviour for PP files.
        # So, cast the answer to numpy.float_ to be safe.
        epoch_hours = np.float_(epoch_hours_unit.date2num(t))
        epoch_hours = _epoch_date_hours(epoch_hours_unit, t)
        if t.minute == 0 and t.second == 0:
            epoch_hours = round(epoch_hours)
        return epoch_hours
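The whole-hour rounding above keeps PP time values exact where they should be exact. A minimal standalone sketch of that rule (the function name is ours, purely illustrative):

    # Times that fall on a whole hour are snapped to an integer, removing
    # any floating-point noise left over from the date2num conversion; all
    # other times keep their fractional part.
    def snap_whole_hours(epoch_hours, minute, second):
        if minute == 0 and second == 0:
            return round(epoch_hours)
        return epoch_hours

    print(snap_whole_hours(394927.0000000012, 0, 0))   # -> 394927
    print(snap_whole_hours(394927.1666666667, 10, 0))  # -> 394927.1666666667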
153 changes: 64 additions & 89 deletions lib/iris/tests/integration/test_pp.py
@@ -1,4 +1,4 @@
# (C) British Crown Copyright 2013 - 2017, Met Office
# (C) British Crown Copyright 2013 - 2019, Met Office
#
# This file is part of Iris.
#
@@ -48,19 +48,27 @@ def _test_coord(self, cube, point, bounds=None, **kwargs):
        if bounds is not None:
            self.assertArrayEqual(coords[0].bounds, [bounds])

    def test_soil_level_round_trip(self):
        # Use pp.load_cubes() to convert a fake PPField into a Cube.
        # NB. Use MagicMock so that SplittableInt header items, such as
        # LBCODE, support len().
        soil_level = 1234
    @staticmethod
    def _mock_field(**kwargs):
        mock_data = np.zeros(1)
        mock_core_data = mock.MagicMock(return_value=mock_data)
        field = mock.MagicMock(lbvc=6, lblev=soil_level,
                               stash=iris.fileformats.pp.STASH(1, 0, 9),
                               lbuser=[0] * 7, lbrsvd=[0] * 4,
        field = mock.MagicMock(lbuser=[0] * 7, lbrsvd=[0] * 4,
                               brsvd=[0] * 4, brlev=0,
                               t1=mock.MagicMock(year=1990, month=1, day=3),
                               t2=mock.MagicMock(year=1990, month=1, day=3),
                               core_data=mock_core_data,
                               realised_dtype=mock_data.dtype)
        field.configure_mock(**kwargs)
        return field

    def test_soil_level_round_trip(self):
        # Use pp.load_cubes() to convert a fake PPField into a Cube.
        # NB. Use MagicMock so that SplittableInt header items, such as
        # LBCODE, support len().
        soil_level = 1234
        field = self._mock_field(
            lbvc=6, lblev=soil_level,
            stash=iris.fileformats.pp.STASH(1, 0, 9))
        load = mock.Mock(return_value=iter([field]))
        with mock.patch('iris.fileformats.pp.load', new=load) as load:
            cube = next(iris.fileformats.pp.load_cubes('DUMMY'))
@@ -89,14 +97,9 @@ def test_soil_depth_round_trip(self):
        # LBCODE, support len().
        lower, point, upper = 1.2, 3.4, 5.6
        brsvd = [lower, 0, 0, 0]
        mock_data = np.zeros(1)
        mock_core_data = mock.MagicMock(return_value=mock_data)
        field = mock.MagicMock(lbvc=6, blev=point,
                               stash=iris.fileformats.pp.STASH(1, 0, 9),
                               lbuser=[0] * 7, lbrsvd=[0] * 4,
                               brsvd=brsvd, brlev=upper,
                               core_data=mock_core_data,
                               realised_dtype=mock_data.dtype)
        field = self._mock_field(
            lbvc=6, blev=point, brsvd=brsvd, brlev=upper,
            stash=iris.fileformats.pp.STASH(1, 0, 9))
        load = mock.Mock(return_value=iter([field]))
        with mock.patch('iris.fileformats.pp.load', new=load) as load:
            cube = next(iris.fileformats.pp.load_cubes('DUMMY'))
@@ -126,12 +129,7 @@ def test_potential_temperature_level_round_trip(self):
        # NB. Use MagicMock so that SplittableInt header items, such as
        # LBCODE, support len().
        potm_value = 22.5
        mock_data = np.zeros(1)
        mock_core_data = mock.MagicMock(return_value=mock_data)
        field = mock.MagicMock(lbvc=19, blev=potm_value,
                               lbuser=[0] * 7, lbrsvd=[0] * 4,
                               core_data=mock_core_data,
                               realised_dtype=mock_data.dtype)
        field = self._mock_field(lbvc=19, blev=potm_value)
        load = mock.Mock(return_value=iter([field]))
        with mock.patch('iris.fileformats.pp.load', new=load):
            cube = next(iris.fileformats.pp.load_cubes('DUMMY'))
@@ -149,40 +147,46 @@ def test_potential_temperature_level_round_trip(self):
        self.assertEqual(field.lbvc, 19)
        self.assertEqual(field.blev, potm_value)

    @staticmethod
    def _field_with_data(scale=1, **kwargs):
        x, y = 40, 30
        mock_data = np.arange(1200).reshape(y, x) * scale
        mock_core_data = mock.MagicMock(return_value=mock_data)
        field = mock.MagicMock(core_data=mock_core_data,
                               realised_dtype=mock_data.dtype,
                               lbcode=[1],
                               lbnpt=x, lbrow=y, bzx=350, bdx=1.5,
                               bzy=40, bdy=1.5, lbuser=[0] * 7,
                               lbrsvd=[0] * 4,
                               t1=mock.MagicMock(year=1990, month=1, day=3),
                               t2=mock.MagicMock(year=1990, month=1, day=3))

        field._x_coord_name = lambda: 'longitude'
        field._y_coord_name = lambda: 'latitude'
        field.coord_system = lambda: None
        field.configure_mock(**kwargs)
Review comment (Member): I definitely prefer the approach of adding **kwargs here 👍

        return field

    def test_hybrid_pressure_round_trip(self):
        # Use pp.load_cubes() to convert fake PPFields into Cubes.
        # NB. Use MagicMock so that SplittableInt header items, such as
        # LBCODE, support len().
        def field_with_data(scale=1):
            x, y = 40, 30
            mock_data = np.arange(1200).reshape(y, x) * scale
            mock_core_data = mock.MagicMock(return_value=mock_data)
            field = mock.MagicMock(core_data=mock_core_data,
                                   realised_dtype=mock_data.dtype,
                                   lbcode=[1],
                                   lbnpt=x, lbrow=y, bzx=350, bdx=1.5,
                                   bzy=40, bdy=1.5, lbuser=[0] * 7,
                                   lbrsvd=[0] * 4)

            field._x_coord_name = lambda: 'longitude'
            field._y_coord_name = lambda: 'latitude'
            field.coord_system = lambda: None
            return field

        # Make a fake reference surface field.
        pressure_field = field_with_data(10)
        pressure_field.stash = iris.fileformats.pp.STASH(1, 0, 409)
        pressure_field.lbuser[3] = 409
        pressure_field = self._field_with_data(
            10,
            stash=iris.fileformats.pp.STASH(1, 0, 409),
            lbuser=[0, 0, 0, 409, 0, 0, 0])

        # Make a fake data field which needs the reference surface.
        model_level = 5678
        sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95
        delta_lower, delta, delta_upper = 0.05, 0.1, 0.15
        data_field = field_with_data()
        data_field.configure_mock(lbvc=9, lblev=model_level,
                                  bhlev=delta, bhrlev=delta_lower,
                                  blev=sigma, brlev=sigma_lower,
                                  brsvd=[sigma_upper, delta_upper])
        data_field = self._field_with_data(
            lbvc=9, lblev=model_level,
            bhlev=delta, bhrlev=delta_lower,
            blev=sigma, brlev=sigma_lower,
            brsvd=[sigma_upper, delta_upper])

        # Convert both fields to cubes.
        load = mock.Mock(return_value=iter([pressure_field, data_field]))
@@ -236,35 +240,21 @@ def field_with_data(scale=1):
        self.assertEqual(data_field.brsvd, [sigma_upper, delta_upper])

    def test_hybrid_pressure_with_duplicate_references(self):
        def field_with_data(scale=1):
            x, y = 40, 30
            mock_data = np.arange(1200).reshape(y, x) * scale
            mock_core_data = mock.MagicMock(return_value=mock_data)
            field = mock.MagicMock(core_data=mock_core_data,
                                   realised_dtype=mock_data.dtype,
                                   lbcode=[1],
                                   lbnpt=x, lbrow=y, bzx=350, bdx=1.5,
                                   bzy=40, bdy=1.5, lbuser=[0] * 7,
                                   lbrsvd=[0] * 4)
            field._x_coord_name = lambda: 'longitude'
            field._y_coord_name = lambda: 'latitude'
            field.coord_system = lambda: None
            return field

        # Make a fake reference surface field.
        pressure_field = field_with_data(10)
        pressure_field.stash = iris.fileformats.pp.STASH(1, 0, 409)
        pressure_field.lbuser[3] = 409
        pressure_field = self._field_with_data(
            10,
            stash=iris.fileformats.pp.STASH(1, 0, 409),
            lbuser=[0, 0, 0, 409, 0, 0, 0])

        # Make a fake data field which needs the reference surface.
        model_level = 5678
        sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95
        delta_lower, delta, delta_upper = 0.05, 0.1, 0.15
        data_field = field_with_data()
        data_field.configure_mock(lbvc=9, lblev=model_level,
                                  bhlev=delta, bhrlev=delta_lower,
                                  blev=sigma, brlev=sigma_lower,
                                  brsvd=[sigma_upper, delta_upper])
        data_field = self._field_with_data(
            lbvc=9, lblev=model_level,
            bhlev=delta, bhrlev=delta_lower,
            blev=sigma, brlev=sigma_lower,
            brsvd=[sigma_upper, delta_upper])

        # Convert both fields to cubes.
        load = mock.Mock(return_value=iter([data_field,
@@ -351,30 +341,15 @@ def test_hybrid_height_round_trip_no_reference(self):
        # Use pp.load_cubes() to convert fake PPFields into Cubes.
        # NB. Use MagicMock so that SplittableInt header items, such as
        # LBCODE, support len().
        def field_with_data(scale=1):
            x, y = 40, 30
            mock_data = np.arange(1200).reshape(y, x) * scale
            mock_core_data = mock.MagicMock(return_value=mock_data)
            field = mock.MagicMock(core_data=mock_core_data,
                                   realised_dtype=mock_data.dtype,
                                   lbcode=[1],
                                   lbnpt=x, lbrow=y, bzx=350, bdx=1.5,
                                   bzy=40, bdy=1.5, lbuser=[0] * 7,
                                   lbrsvd=[0] * 4)
            field._x_coord_name = lambda: 'longitude'
            field._y_coord_name = lambda: 'latitude'
            field.coord_system = lambda: None
            return field

        # Make a fake data field which needs the reference surface.
        model_level = 5678
        sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95
        delta_lower, delta, delta_upper = 0.05, 0.1, 0.15
        data_field = field_with_data()
        data_field.configure_mock(lbvc=65, lblev=model_level,
                                  bhlev=sigma, bhrlev=sigma_lower,
                                  blev=delta, brlev=delta_lower,
                                  brsvd=[delta_upper, sigma_upper])
        data_field = self._field_with_data(
            lbvc=65, lblev=model_level,
            bhlev=sigma, bhrlev=sigma_lower,
            blev=delta, brlev=delta_lower,
            brsvd=[delta_upper, sigma_upper])

        # Convert field to a cube.
        load = mock.Mock(return_value=iter([data_field]))
8 changes: 4 additions & 4 deletions lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml
@@ -396,8 +396,8 @@
</auxCoord>
</coord>
<coord datadims="[0]">
<dimCoord id="1d45e087" points="[0.166666664183, 0.333333332092, 0.5,
0.666666667908, 0.833333335817, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/>
<dimCoord id="1d45e087" points="[0.166666666686, 0.333333333314, 0.5,
0.666666666686, 0.833333333314, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/>
</coord>
<coord>
<dimCoord id="9c8bdf81" points="[347926.0]" shape="(1,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/>
@@ -919,8 +919,8 @@
</auxCoord>
</coord>
<coord datadims="[0]">
<dimCoord id="1d45e087" points="[0.166666664183, 0.333333332092, 0.5,
0.666666667908, 0.833333335817, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/>
<dimCoord id="1d45e087" points="[0.166666666686, 0.333333333314, 0.5,
0.666666666686, 0.833333333314, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/>
</coord>
<coord>
<dimCoord id="9c8bdf81" points="[347926.0]" shape="(1,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/>
2 changes: 1 addition & 1 deletion lib/iris/tests/results/grib_load/polar_stereo_grib1.cml
@@ -3,7 +3,7 @@
<cube dtype="float64" units="unknown">
<coords>
<coord>
<dimCoord bounds="[[0.0, 0.999999996275]]" id="1d45e087" points="[0.499999998137]" shape="(1,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/>
<dimCoord bounds="[[0.0, 1.0]]" id="1d45e087" points="[0.5]" shape="(1,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/>
</coord>
<coord>
<auxCoord id="61bde96d" long_name="originating_centre" points="[ US National Weather Service, National Centres for Environmental Prediction]" shape="(1,)" units="Unit('no_unit')" value_type="string"/>
@@ -414,8 +414,8 @@
</auxCoord>
</coord>
<coord datadims="[0]">
<auxCoord id="1d45e087" points="[0.166666664183, 0.333333332092, 0.5,
0.666666667908, 0.833333335817, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64" var_name="forecast_period"/>
<auxCoord id="1d45e087" points="[0.166666666686, 0.333333333314, 0.5,
0.666666666686, 0.833333333314, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64" var_name="forecast_period"/>
</coord>
<coord>
<dimCoord id="9c8bdf81" points="[347926.0]" shape="(1,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64" var_name="forecast_reference_time"/>
@@ -1,4 +1,4 @@
# (C) British Crown Copyright 2014 - 2018, Met Office
# (C) British Crown Copyright 2014 - 2019, Met Office
#
# This file is part of Iris.
#
@@ -140,8 +140,8 @@ def test_not_exact_hours(self):
            lbcode=_lbcode(1), lbtim=lbtim, epoch_hours_unit=_EPOCH_HOURS_UNIT,
            t1=t1, t2=t2, lbft=None)
        (fp, _), (t, _), (frt, _) = coords_and_dims
        self.assertEqual(fp.points[0], 7.1666666641831398)
        self.assertEqual(t.points[0], 394927.16666666418)
        self.assertArrayAllClose(fp.points[0], 7.1666666, atol=0.0001, rtol=0)
        self.assertArrayAllClose(t.points[0], 394927.166666, atol=0.01, rtol=0)


class TestLBTIMx2x_TimePeriod(TestField):
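These assertions switch from exact equality to tolerance checks because the low-order digits of an 'hours since epoch' value depend on the cftime/netcdf4python version in use; only the digits that matter for PP round-tripping are pinned. A standalone numpy equivalent of the new check, for illustration only (the variable name is ours):

    import numpy as np

    # 7 hours 10 minutes is 7 + 1/6 hours; its float64 representation can
    # differ in the final bits between library versions, so compare with an
    # absolute tolerance rather than exact equality.
    fp_point = 7.0 + 10.0 / 60.0
    np.testing.assert_allclose(fp_point, 7.1666666, atol=0.0001, rtol=0)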