diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py
index 88a215546b..c4ed256330 100644
--- a/lib/iris/fileformats/pp_load_rules.py
+++ b/lib/iris/fileformats/pp_load_rules.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2013 - 2018, Met Office
+# (C) British Crown Copyright 2013 - 2019, Met Office
#
# This file is part of Iris.
#
@@ -30,7 +30,6 @@
from iris.coords import AuxCoord, CellMethod, DimCoord
from iris.fileformats.rules import (ConversionMetadata, Factory, Reference,
ReferenceTarget)
-import iris.fileformats.pp
from iris.fileformats._pp_lbproc_pairs import LBPROC_MAP
from iris.fileformats.um_cf_map import (LBFC_TO_CF, STASH_TO_CF,
STASHCODE_IMPLIED_HEIGHTS)
@@ -444,6 +443,81 @@ def _new_coord_and_dims(is_vector_operation,
_HOURS_UNIT = cf_units.Unit('hours')
+def _epoch_date_hours(epoch_hours_unit, datetime):
+ """
+ Return an 'hours since epoch' number for a date.
+
+ Args:
+    * epoch_hours_unit (:class:`cf_units.Unit`):
+ Unit defining the calendar and zero-time of conversion.
+ * datetime (:class:`datetime.datetime`-like):
+ Date object containing year / month / day attributes.
+
+    This routine can also handle dates with a zero year, month or day: such
+ dates were valid inputs to 'date2num' up to cftime version 1.0.1, but are
+ now illegal : This routine interprets any zeros as being "1 year/month/day
+ before a year/month/day of 1". This produces results consistent with the
+ "old" cftime behaviour.
+
+ """
+ days_offset = None
+ if (datetime.year == 0 or datetime.month == 0 or datetime.day == 0):
+ # cftime > 1.0.1 no longer allows non-calendar dates.
+ # Add 1 to year/month/day, to get a valid date, and adjust the result
+ # according to the actual epoch and calendar. This reproduces 'old'
+ # results that were produced with cftime <= 1.0.1.
+ days_offset = 0
+ y, m, d = datetime.year, datetime.month, datetime.day
+ calendar = epoch_hours_unit.calendar
+ if d == 0:
+ # Add one day, by changing day=0 to 1.
+ d = 1
+ days_offset += 1
+ if m == 0:
+ # Add a 'January', by changing month=0 to 1.
+ m = 1
+ if calendar == cf_units.CALENDAR_GREGORIAN:
+ days_offset += 31
+ elif calendar == cf_units.CALENDAR_360_DAY:
+ days_offset += 30
+ elif calendar == cf_units.CALENDAR_365_DAY:
+ days_offset += 31
+ else:
+ msg = 'unrecognised calendar : {}'
+ raise ValueError(msg.format(calendar))
+
+ if y == 0:
+ # Add a 'Year 0', by changing year=0 to 1.
+ y = 1
+ if calendar == cf_units.CALENDAR_GREGORIAN:
+ days_in_year_0 = 366
+ elif calendar == cf_units.CALENDAR_360_DAY:
+ days_in_year_0 = 360
+ elif calendar == cf_units.CALENDAR_365_DAY:
+ days_in_year_0 = 365
+ else:
+ msg = 'unrecognised calendar : {}'
+ raise ValueError(msg.format(calendar))
+
+ days_offset += days_in_year_0
+
+ # Replace y/m/d with a modified date, that cftime will accept.
+ datetime = datetime.replace(year=y, month=m, day=d)
+
+        # netcdf4python has changed its behaviour, at version 1.2, such
+ # that a date2num calculation returns a python float, not
+ # numpy.float64. The behaviour of round is to recast this to an
+ # int, which is not the desired behaviour for PP files.
+ # So, cast the answer to numpy.float_ to be safe.
+ epoch_hours = np.float_(epoch_hours_unit.date2num(datetime))
+
+ if days_offset is not None:
+ # Correct for any modifications to achieve a valid date.
+ epoch_hours -= 24.0 * days_offset
+
+ return epoch_hours
+
+
def _convert_time_coords(lbcode, lbtim, epoch_hours_unit,
t1, t2, lbft,
t1_dims=(), t2_dims=(), lbft_dims=()):
@@ -481,12 +555,7 @@ def _convert_time_coords(lbcode, lbtim, epoch_hours_unit,
"""
def date2hours(t):
- # netcdf4python has changed it's behaviour, at version 1.2, such
- # that a date2num calculation returns a python float, not
- # numpy.float64. The behaviour of round is to recast this to an
- # int, which is not the desired behaviour for PP files.
- # So, cast the answer to numpy.float_ to be safe.
- epoch_hours = np.float_(epoch_hours_unit.date2num(t))
+ epoch_hours = _epoch_date_hours(epoch_hours_unit, t)
if t.minute == 0 and t.second == 0:
epoch_hours = round(epoch_hours)
return epoch_hours
diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py
index 0cc0868b1d..e4f3b909a5 100644
--- a/lib/iris/tests/integration/test_pp.py
+++ b/lib/iris/tests/integration/test_pp.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2013 - 2017, Met Office
+# (C) British Crown Copyright 2013 - 2019, Met Office
#
# This file is part of Iris.
#
@@ -48,19 +48,27 @@ def _test_coord(self, cube, point, bounds=None, **kwargs):
if bounds is not None:
self.assertArrayEqual(coords[0].bounds, [bounds])
- def test_soil_level_round_trip(self):
- # Use pp.load_cubes() to convert a fake PPField into a Cube.
- # NB. Use MagicMock so that SplittableInt header items, such as
- # LBCODE, support len().
- soil_level = 1234
+ @staticmethod
+ def _mock_field(**kwargs):
mock_data = np.zeros(1)
mock_core_data = mock.MagicMock(return_value=mock_data)
- field = mock.MagicMock(lbvc=6, lblev=soil_level,
- stash=iris.fileformats.pp.STASH(1, 0, 9),
- lbuser=[0] * 7, lbrsvd=[0] * 4,
+ field = mock.MagicMock(lbuser=[0] * 7, lbrsvd=[0] * 4,
brsvd=[0] * 4, brlev=0,
+ t1=mock.MagicMock(year=1990, month=1, day=3),
+ t2=mock.MagicMock(year=1990, month=1, day=3),
core_data=mock_core_data,
realised_dtype=mock_data.dtype)
+ field.configure_mock(**kwargs)
+ return field
+
+ def test_soil_level_round_trip(self):
+ # Use pp.load_cubes() to convert a fake PPField into a Cube.
+ # NB. Use MagicMock so that SplittableInt header items, such as
+ # LBCODE, support len().
+ soil_level = 1234
+ field = self._mock_field(
+ lbvc=6, lblev=soil_level,
+ stash=iris.fileformats.pp.STASH(1, 0, 9))
load = mock.Mock(return_value=iter([field]))
with mock.patch('iris.fileformats.pp.load', new=load) as load:
cube = next(iris.fileformats.pp.load_cubes('DUMMY'))
@@ -89,14 +97,9 @@ def test_soil_depth_round_trip(self):
# LBCODE, support len().
lower, point, upper = 1.2, 3.4, 5.6
brsvd = [lower, 0, 0, 0]
- mock_data = np.zeros(1)
- mock_core_data = mock.MagicMock(return_value=mock_data)
- field = mock.MagicMock(lbvc=6, blev=point,
- stash=iris.fileformats.pp.STASH(1, 0, 9),
- lbuser=[0] * 7, lbrsvd=[0] * 4,
- brsvd=brsvd, brlev=upper,
- core_data=mock_core_data,
- realised_dtype=mock_data.dtype)
+ field = self._mock_field(
+ lbvc=6, blev=point, brsvd=brsvd, brlev=upper,
+ stash=iris.fileformats.pp.STASH(1, 0, 9))
load = mock.Mock(return_value=iter([field]))
with mock.patch('iris.fileformats.pp.load', new=load) as load:
cube = next(iris.fileformats.pp.load_cubes('DUMMY'))
@@ -126,12 +129,7 @@ def test_potential_temperature_level_round_trip(self):
# NB. Use MagicMock so that SplittableInt header items, such as
# LBCODE, support len().
potm_value = 22.5
- mock_data = np.zeros(1)
- mock_core_data = mock.MagicMock(return_value=mock_data)
- field = mock.MagicMock(lbvc=19, blev=potm_value,
- lbuser=[0] * 7, lbrsvd=[0] * 4,
- core_data=mock_core_data,
- realised_dtype=mock_data.dtype)
+ field = self._mock_field(lbvc=19, blev=potm_value)
load = mock.Mock(return_value=iter([field]))
with mock.patch('iris.fileformats.pp.load', new=load):
cube = next(iris.fileformats.pp.load_cubes('DUMMY'))
@@ -149,40 +147,46 @@ def test_potential_temperature_level_round_trip(self):
self.assertEqual(field.lbvc, 19)
self.assertEqual(field.blev, potm_value)
+ @staticmethod
+ def _field_with_data(scale=1, **kwargs):
+ x, y = 40, 30
+ mock_data = np.arange(1200).reshape(y, x) * scale
+ mock_core_data = mock.MagicMock(return_value=mock_data)
+ field = mock.MagicMock(core_data=mock_core_data,
+ realised_dtype=mock_data.dtype,
+ lbcode=[1],
+ lbnpt=x, lbrow=y, bzx=350, bdx=1.5,
+ bzy=40, bdy=1.5, lbuser=[0] * 7,
+ lbrsvd=[0] * 4,
+ t1=mock.MagicMock(year=1990, month=1, day=3),
+ t2=mock.MagicMock(year=1990, month=1, day=3))
+
+ field._x_coord_name = lambda: 'longitude'
+ field._y_coord_name = lambda: 'latitude'
+ field.coord_system = lambda: None
+ field.configure_mock(**kwargs)
+ return field
+
def test_hybrid_pressure_round_trip(self):
# Use pp.load_cubes() to convert fake PPFields into Cubes.
# NB. Use MagicMock so that SplittableInt header items, such as
# LBCODE, support len().
- def field_with_data(scale=1):
- x, y = 40, 30
- mock_data = np.arange(1200).reshape(y, x) * scale
- mock_core_data = mock.MagicMock(return_value=mock_data)
- field = mock.MagicMock(core_data=mock_core_data,
- realised_dtype=mock_data.dtype,
- lbcode=[1],
- lbnpt=x, lbrow=y, bzx=350, bdx=1.5,
- bzy=40, bdy=1.5, lbuser=[0] * 7,
- lbrsvd=[0] * 4)
-
- field._x_coord_name = lambda: 'longitude'
- field._y_coord_name = lambda: 'latitude'
- field.coord_system = lambda: None
- return field
# Make a fake reference surface field.
- pressure_field = field_with_data(10)
- pressure_field.stash = iris.fileformats.pp.STASH(1, 0, 409)
- pressure_field.lbuser[3] = 409
+ pressure_field = self._field_with_data(
+ 10,
+ stash=iris.fileformats.pp.STASH(1, 0, 409),
+ lbuser=[0, 0, 0, 409, 0, 0, 0])
# Make a fake data field which needs the reference surface.
model_level = 5678
sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95
delta_lower, delta, delta_upper = 0.05, 0.1, 0.15
- data_field = field_with_data()
- data_field.configure_mock(lbvc=9, lblev=model_level,
- bhlev=delta, bhrlev=delta_lower,
- blev=sigma, brlev=sigma_lower,
- brsvd=[sigma_upper, delta_upper])
+ data_field = self._field_with_data(
+ lbvc=9, lblev=model_level,
+ bhlev=delta, bhrlev=delta_lower,
+ blev=sigma, brlev=sigma_lower,
+ brsvd=[sigma_upper, delta_upper])
# Convert both fields to cubes.
load = mock.Mock(return_value=iter([pressure_field, data_field]))
@@ -236,35 +240,21 @@ def field_with_data(scale=1):
self.assertEqual(data_field.brsvd, [sigma_upper, delta_upper])
def test_hybrid_pressure_with_duplicate_references(self):
- def field_with_data(scale=1):
- x, y = 40, 30
- mock_data = np.arange(1200).reshape(y, x) * scale
- mock_core_data = mock.MagicMock(return_value=mock_data)
- field = mock.MagicMock(core_data=mock_core_data,
- realised_dtype=mock_data.dtype,
- lbcode=[1],
- lbnpt=x, lbrow=y, bzx=350, bdx=1.5,
- bzy=40, bdy=1.5, lbuser=[0] * 7,
- lbrsvd=[0] * 4)
- field._x_coord_name = lambda: 'longitude'
- field._y_coord_name = lambda: 'latitude'
- field.coord_system = lambda: None
- return field
-
# Make a fake reference surface field.
- pressure_field = field_with_data(10)
- pressure_field.stash = iris.fileformats.pp.STASH(1, 0, 409)
- pressure_field.lbuser[3] = 409
+ pressure_field = self._field_with_data(
+ 10,
+ stash=iris.fileformats.pp.STASH(1, 0, 409),
+ lbuser=[0, 0, 0, 409, 0, 0, 0])
# Make a fake data field which needs the reference surface.
model_level = 5678
sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95
delta_lower, delta, delta_upper = 0.05, 0.1, 0.15
- data_field = field_with_data()
- data_field.configure_mock(lbvc=9, lblev=model_level,
- bhlev=delta, bhrlev=delta_lower,
- blev=sigma, brlev=sigma_lower,
- brsvd=[sigma_upper, delta_upper])
+ data_field = self._field_with_data(
+ lbvc=9, lblev=model_level,
+ bhlev=delta, bhrlev=delta_lower,
+ blev=sigma, brlev=sigma_lower,
+ brsvd=[sigma_upper, delta_upper])
# Convert both fields to cubes.
load = mock.Mock(return_value=iter([data_field,
@@ -351,30 +341,15 @@ def test_hybrid_height_round_trip_no_reference(self):
# Use pp.load_cubes() to convert fake PPFields into Cubes.
# NB. Use MagicMock so that SplittableInt header items, such as
# LBCODE, support len().
- def field_with_data(scale=1):
- x, y = 40, 30
- mock_data = np.arange(1200).reshape(y, x) * scale
- mock_core_data = mock.MagicMock(return_value=mock_data)
- field = mock.MagicMock(core_data=mock_core_data,
- realised_dtype=mock_data.dtype,
- lbcode=[1],
- lbnpt=x, lbrow=y, bzx=350, bdx=1.5,
- bzy=40, bdy=1.5, lbuser=[0] * 7,
- lbrsvd=[0] * 4)
- field._x_coord_name = lambda: 'longitude'
- field._y_coord_name = lambda: 'latitude'
- field.coord_system = lambda: None
- return field
-
# Make a fake data field which needs the reference surface.
model_level = 5678
sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95
delta_lower, delta, delta_upper = 0.05, 0.1, 0.15
- data_field = field_with_data()
- data_field.configure_mock(lbvc=65, lblev=model_level,
- bhlev=sigma, bhrlev=sigma_lower,
- blev=delta, brlev=delta_lower,
- brsvd=[delta_upper, sigma_upper])
+ data_field = self._field_with_data(
+ lbvc=65, lblev=model_level,
+ bhlev=sigma, bhrlev=sigma_lower,
+ blev=delta, brlev=delta_lower,
+ brsvd=[delta_upper, sigma_upper])
# Convert field to a cube.
load = mock.Mock(return_value=iter([data_field]))
diff --git a/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml b/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml
index 0139d1d2fb..5bba278059 100644
--- a/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml
+++ b/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml
@@ -396,8 +396,8 @@
-
+
@@ -919,8 +919,8 @@
-
+
diff --git a/lib/iris/tests/results/grib_load/polar_stereo_grib1.cml b/lib/iris/tests/results/grib_load/polar_stereo_grib1.cml
index 2ba2e8205b..f8e03e6d18 100644
--- a/lib/iris/tests/results/grib_load/polar_stereo_grib1.cml
+++ b/lib/iris/tests/results/grib_load/polar_stereo_grib1.cml
@@ -3,7 +3,7 @@
-
+
diff --git a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml
index 070e7597d3..a89756a77e 100644
--- a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml
+++ b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml
@@ -414,8 +414,8 @@
-
+
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py
index f00b39156f..898df79620 100644
--- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py
+++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2014 - 2018, Met Office
+# (C) British Crown Copyright 2014 - 2019, Met Office
#
# This file is part of Iris.
#
@@ -140,8 +140,8 @@ def test_not_exact_hours(self):
lbcode=_lbcode(1), lbtim=lbtim, epoch_hours_unit=_EPOCH_HOURS_UNIT,
t1=t1, t2=t2, lbft=None)
(fp, _), (t, _), (frt, _) = coords_and_dims
- self.assertEqual(fp.points[0], 7.1666666641831398)
- self.assertEqual(t.points[0], 394927.16666666418)
+ self.assertArrayAllClose(fp.points[0], 7.1666666, atol=0.0001, rtol=0)
+ self.assertArrayAllClose(t.points[0], 394927.166666, atol=0.01, rtol=0)
class TestLBTIMx2x_TimePeriod(TestField):
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py
new file mode 100644
index 0000000000..0209df5dd0
--- /dev/null
+++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py
@@ -0,0 +1,150 @@
+# (C) British Crown Copyright 2019, Met Office
+#
+# This file is part of Iris.
+#
+# Iris is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Iris is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Iris. If not, see <http://www.gnu.org/licenses/>.
+"""
+Unit tests for
+:func:`iris.fileformats.pp_load_rules._epoch_date_hours`.
+
+"""
+from __future__ import (absolute_import, division, print_function)
+from six.moves import (filter, input, map, range, zip) # noqa
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests
+
+import cf_units
+from cf_units import Unit
+from cftime import datetime as nc_datetime
+
+from iris.fileformats.pp_load_rules \
+ import _epoch_date_hours as epoch_hours_call
+
+
+#
+# Run tests for each of the possible calendars from PPfield.calendar().
+# Test year=0 and all=0 cases, plus "normal" dates, for each calendar.
+# Result values are the same as from 'date2num' in cftime version <= 1.0.1.
+#
+
+class TestEpochHours__gregorian(tests.IrisTest):
+ def setUp(self):
+ self.hrs_unit = Unit('hours since epoch',
+ calendar=cf_units.CALENDAR_GREGORIAN)
+
+ def test_1970_1_1(self):
+ test_date = nc_datetime(1970, 1, 1)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, 0.0)
+
+ def test_ymd_1_1_1(self):
+ test_date = nc_datetime(1, 1, 1)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, -17259936.0)
+
+ def test_year_0(self):
+ test_date = nc_datetime(0, 1, 1)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, -17268720.0)
+
+ def test_ymd_0_0_0(self):
+ test_date = nc_datetime(0, 0, 0)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, -17269488.0)
+
+ def test_ymd_0_preserves_timeofday(self):
+ hrs, mins, secs, usecs = (7, 13, 24, 335772)
+ hours_in_day = (hrs +
+ 1./60 * mins +
+ 1./3600 * secs +
+ (1.0e-6) / 3600 * usecs)
+ test_date = nc_datetime(0, 0, 0,
+ hour=hrs, minute=mins, second=secs,
+ microsecond=usecs)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ # NOTE: the calculation is only accurate to approx +/- 0.5 seconds
+ # in such a large number of hours -- even 0.1 seconds is too fine.
+ absolute_tolerance = 0.5 / 3600
+ self.assertArrayAllClose(result, -17269488.0 + hours_in_day,
+ rtol=0, atol=absolute_tolerance)
+
+
+class TestEpochHours__360day(tests.IrisTest):
+ def setUp(self):
+ self.hrs_unit = Unit('hours since epoch',
+ calendar=cf_units.CALENDAR_360_DAY)
+
+ def test_1970_1_1(self):
+ test_date = nc_datetime(1970, 1, 1)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, 0.0)
+
+ def test_ymd_1_1_1(self):
+ test_date = nc_datetime(1, 1, 1)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, -17012160.0)
+
+ def test_year_0(self):
+ test_date = nc_datetime(0, 1, 1)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, -17020800.0)
+
+ def test_ymd_0_0_0(self):
+ test_date = nc_datetime(0, 0, 0)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, -17021544.0)
+
+
+class TestEpochHours__365day(tests.IrisTest):
+ def setUp(self):
+ self.hrs_unit = Unit('hours since epoch',
+ calendar=cf_units.CALENDAR_365_DAY)
+
+ def test_1970_1_1(self):
+ test_date = nc_datetime(1970, 1, 1)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, 0.0)
+
+ def test_ymd_1_1_1(self):
+ test_date = nc_datetime(1, 1, 1)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, -17248440.0)
+
+ def test_year_0(self):
+ test_date = nc_datetime(0, 1, 1)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, -17257200.0)
+
+ def test_ymd_0_0_0(self):
+ test_date = nc_datetime(0, 0, 0)
+ result = epoch_hours_call(self.hrs_unit, test_date)
+ self.assertEqual(result, -17257968.0)
+
+
+class TestEpochHours__invalid_calendar(tests.IrisTest):
+ def test_bad_calendar(self):
+ # Setup a unit with an unrecognised calendar
+ hrs_unit = Unit('hours since epoch',
+ calendar=cf_units.CALENDAR_ALL_LEAP)
+ # Test against a date with year=0, which requires calendar correction.
+ test_date = nc_datetime(0, 1, 1)
+ # Check that this causes an error.
+ with self.assertRaisesRegexp(ValueError, 'unrecognised calendar'):
+ epoch_hours_call(hrs_unit, test_date)
+
+
+if __name__ == "__main__":
+ tests.main()
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py
index c1f8bdb891..77761d431b 100644
--- a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py
+++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2013 - 2018, Met Office
+# (C) British Crown Copyright 2013 - 2019, Met Office
#
# This file is part of Iris.
#
@@ -39,6 +39,14 @@
import iris.tests.unit.fileformats
+def _mock_field(**kwargs):
+ # Generate a mock field, but ensure T1 and T2 viable for rules.
+ field = mock.MagicMock(t1=mock.MagicMock(year=1990, month=3, day=7),
+ t2=mock.MagicMock(year=1990, month=3, day=7))
+ field.configure_mock(**kwargs)
+ return field
+
+
class TestLBCODE(iris.tests.unit.fileformats.TestField):
@staticmethod
def _is_cross_section_height_coord(coord):
@@ -50,7 +58,7 @@ def test_cross_section_height_bdy_zero(self):
lbcode = SplittableInt(19902, {'iy': slice(0, 2), 'ix': slice(2, 4)})
points = np.array([10, 20, 30, 40])
bounds = np.array([[0, 15], [15, 25], [25, 35], [35, 45]])
- field = mock.MagicMock(lbcode=lbcode, bdy=0, y=points, y_bounds=bounds)
+ field = _mock_field(lbcode=lbcode, bdy=0, y=points, y_bounds=bounds)
self._test_for_coord(field, convert,
TestLBCODE._is_cross_section_height_coord,
expected_points=points,
@@ -61,8 +69,8 @@ def test_cross_section_height_bdy_bmdi(self):
points = np.array([10, 20, 30, 40])
bounds = np.array([[0, 15], [15, 25], [25, 35], [35, 45]])
bmdi = -1.07374e+09
- field = mock.MagicMock(lbcode=lbcode, bdy=bmdi, bmdi=bmdi,
- y=points, y_bounds=bounds)
+ field = _mock_field(lbcode=lbcode, bdy=bmdi, bmdi=bmdi,
+ y=points, y_bounds=bounds)
self._test_for_coord(field, convert,
TestLBCODE._is_cross_section_height_coord,
expected_points=points,
@@ -105,7 +113,7 @@ def _is_soil_depth_coord(coord):
def test_soil_levels(self):
level = 1234
- field = mock.MagicMock(lbvc=6, lblev=level, brsvd=[0, 0], brlev=0)
+ field = _mock_field(lbvc=6, lblev=level, brsvd=[0, 0], brlev=0)
self._test_for_coord(field, convert,
self._is_soil_model_level_number_coord,
expected_points=[level],
@@ -113,8 +121,7 @@ def test_soil_levels(self):
def test_soil_depth(self):
lower, point, upper = 1.2, 3.4, 5.6
- field = mock.MagicMock(lbvc=6, blev=point, brsvd=[lower, 0],
- brlev=upper)
+ field = _mock_field(lbvc=6, blev=point, brsvd=[lower, 0], brlev=upper)
self._test_for_coord(field, convert,
self._is_soil_depth_coord,
expected_points=[point],
@@ -122,9 +129,9 @@ def test_soil_depth(self):
def test_hybrid_pressure_model_level_number(self):
level = 5678
- field = mock.MagicMock(lbvc=9, lblev=level,
- blev=20, brlev=23, bhlev=42,
- bhrlev=45, brsvd=[17, 40])
+ field = _mock_field(lbvc=9, lblev=level,
+ blev=20, brlev=23, bhlev=42,
+ bhrlev=45, brsvd=[17, 40])
self._test_for_coord(field, convert,
TestLBVC._is_model_level_number_coord,
expected_points=[level],
@@ -134,10 +141,10 @@ def test_hybrid_pressure_delta(self):
delta_point = 12.0
delta_lower_bound = 11.0
delta_upper_bound = 13.0
- field = mock.MagicMock(lbvc=9, lblev=5678,
- blev=20, brlev=23, bhlev=delta_point,
- bhrlev=delta_lower_bound,
- brsvd=[17, delta_upper_bound])
+ field = _mock_field(lbvc=9, lblev=5678,
+ blev=20, brlev=23, bhlev=delta_point,
+ bhrlev=delta_lower_bound,
+ brsvd=[17, delta_upper_bound])
self._test_for_coord(field, convert,
TestLBVC._is_level_pressure_coord,
expected_points=[delta_point],
@@ -148,10 +155,10 @@ def test_hybrid_pressure_sigma(self):
sigma_point = 0.5
sigma_lower_bound = 0.6
sigma_upper_bound = 0.4
- field = mock.MagicMock(lbvc=9, lblev=5678,
- blev=sigma_point, brlev=sigma_lower_bound,
- bhlev=12, bhrlev=11,
- brsvd=[sigma_upper_bound, 13])
+ field = _mock_field(lbvc=9, lblev=5678,
+ blev=sigma_point, brlev=sigma_lower_bound,
+ bhlev=12, bhrlev=11,
+ brsvd=[sigma_upper_bound, 13])
self._test_for_coord(field, convert, TestLBVC._is_sigma_coord,
expected_points=[sigma_point],
expected_bounds=[[sigma_lower_bound,
@@ -159,7 +166,7 @@ def test_hybrid_pressure_sigma(self):
def test_potential_temperature_levels(self):
potm_value = 27.32
- field = mock.MagicMock(lbvc=19, blev=potm_value)
+ field = _mock_field(lbvc=19, blev=potm_value)
self._test_for_coord(field, convert, TestLBVC._is_potm_level_coord,
expected_points=np.array([potm_value]),
expected_bounds=None)
@@ -265,7 +272,7 @@ def test_realization(self):
lbrsvd[3] = 71
points = np.array([71])
bounds = None
- field = mock.MagicMock(lbrsvd=lbrsvd)
+ field = _mock_field(lbrsvd=lbrsvd)
self._test_for_coord(field, convert,
TestLBRSVD._is_realization,
expected_points=points,
@@ -275,7 +282,7 @@ def test_realization(self):
class TestLBSRCE(iris.tests.IrisTest):
def check_um_source_attrs(self, lbsrce,
source_str=None, um_version_str=None):
- field = mock.MagicMock(lbsrce=lbsrce)
+ field = _mock_field(lbsrce=lbsrce)
(factories, references, standard_name, long_name, units,
attributes, cell_methods, dim_coords_and_dims,
aux_coords_and_dims) = convert(field)
@@ -310,7 +317,7 @@ def test_stash_cf_air_temp(self):
lbuser = [1, 0, 0, 16203, 0, 0, 1]
lbfc = 16
stash = STASH(lbuser[6], lbuser[3] // 1000, lbuser[3] % 1000)
- field = mock.MagicMock(lbuser=lbuser, lbfc=lbfc, stash=stash)
+ field = _mock_field(lbuser=lbuser, lbfc=lbfc, stash=stash)
(factories, references, standard_name, long_name, units,
attributes, cell_methods, dim_coords_and_dims,
aux_coords_and_dims) = convert(field)
@@ -321,7 +328,7 @@ def test_no_std_name(self):
lbuser = [1, 0, 0, 0, 0, 0, 0]
lbfc = 0
stash = STASH(lbuser[6], lbuser[3] // 1000, lbuser[3] % 1000)
- field = mock.MagicMock(lbuser=lbuser, lbfc=lbfc, stash=stash)
+ field = _mock_field(lbuser=lbuser, lbfc=lbfc, stash=stash)
(factories, references, standard_name, long_name, units,
attributes, cell_methods, dim_coords_and_dims,
aux_coords_and_dims) = convert(field)
@@ -334,7 +341,7 @@ def test_fc_cf_air_temp(self):
lbuser = [1, 0, 0, 0, 0, 0, 0]
lbfc = 16
stash = STASH(lbuser[6], lbuser[3] // 1000, lbuser[3] % 1000)
- field = mock.MagicMock(lbuser=lbuser, lbfc=lbfc, stash=stash)
+ field = _mock_field(lbuser=lbuser, lbfc=lbfc, stash=stash)
(factories, references, standard_name, long_name, units,
attributes, cell_methods, dim_coords_and_dims,
aux_coords_and_dims) = convert(field)
diff --git a/requirements/core.txt b/requirements/core.txt
index fd50d69ce6..e39777edb7 100644
--- a/requirements/core.txt
+++ b/requirements/core.txt
@@ -6,7 +6,7 @@
cartopy
#conda: proj4<5
cf-units>=2
-cftime==1.0.1
+cftime
dask[array] #conda: dask
matplotlib>=2,<3
netcdf4