20 changes: 9 additions & 11 deletions .travis.yml
@@ -22,16 +22,16 @@ git:
  depth: 10000

install:
-- export IRIS_TEST_DATA_REF="e8e62ab79a2f8789f01f8dfa9829915984a39b2a"
+- export IRIS_TEST_DATA_REF="a754fc977d30fdcdacb820b5a5fabd91056afc7b"
- export IRIS_TEST_DATA_SUFFIX=$(echo "${IRIS_TEST_DATA_REF}" | sed "s/^v//")

# Install miniconda
# -----------------
-- export CONDA_BASE=http://repo.continuum.io/miniconda/Miniconda
+- export CONDA_BASE=https://repo.continuum.io/miniconda/Miniconda
- if [[ "$TRAVIS_PYTHON_VERSION" == 2* ]]; then
-    wget ${CONDA_BASE}-3.7.0-Linux-x86_64.sh -O miniconda.sh;
+    wget ${CONDA_BASE}2-latest-Linux-x86_64.sh -O miniconda.sh;
  else
-    wget ${CONDA_BASE}3-3.7.0-Linux-x86_64.sh -O miniconda.sh;
+    wget ${CONDA_BASE}3-latest-Linux-x86_64.sh -O miniconda.sh;
  fi
- bash miniconda.sh -b -p $HOME/miniconda
- export PATH="$HOME/miniconda/bin:$PATH"
@@ -60,10 +60,6 @@ install:
  fi
  fi

-# Perceptual image hashing (TBD: push recipe to conda-forge!)
-- conda install pip
-- pip install imagehash
-
- PREFIX=$HOME/miniconda/envs/$ENV_NAME

# Output debug info
@@ -75,9 +71,11 @@ install:
- python -c 'import cartopy; cartopy.io.shapereader.natural_earth()'

# iris test data
-- wget -O iris-test-data.zip https://github.com/SciTools/iris-test-data/archive/${IRIS_TEST_DATA_REF}.zip
-- unzip -q iris-test-data.zip
-- ln -s $(pwd)/iris-test-data-${IRIS_TEST_DATA_SUFFIX} iris-test-data
+- if [[ "$TEST_MINIMAL" != true ]]; then
+    wget -O iris-test-data.zip https://github.com/SciTools/iris-test-data/archive/${IRIS_TEST_DATA_REF}.zip;
+    unzip -q iris-test-data.zip;
+    ln -s $(pwd)/iris-test-data-${IRIS_TEST_DATA_SUFFIX} iris-test-data;
+  fi

# prepare iris build directory
- python setup.py --with-unpack build_ext --include-dirs=${PREFIX}/include --library-dirs=${PREFIX}/lib
1 change: 1 addition & 0 deletions conda-requirements.txt
@@ -21,6 +21,7 @@ pep8
sphinx
iris_sample_data
filelock
+imagehash

# Optional iris dependencies
nc_time_axis
10 changes: 5 additions & 5 deletions lib/iris/coord_systems.py
@@ -914,11 +914,11 @@ def __repr__(self):
        return ("LambertAzimuthalEqualArea(latitude_of_projection_origin={!r},"
                " longitude_of_projection_origin={!r}, false_easting={!r},"
                " false_northing={!r}, ellipsoid={!r})").format(
-            self.latitude_of_projection_origin,
-            self.longitude_of_projection_origin,
-            self.false_easting,
-            self.false_northing,
-            self.ellipsoid)
+                    self.latitude_of_projection_origin,
+                    self.longitude_of_projection_origin,
+                    self.false_easting,
+                    self.false_northing,
+                    self.ellipsoid)

    def as_cartopy_crs(self):
        if self.ellipsoid is not None:
3 changes: 3 additions & 0 deletions lib/iris/cube.py
@@ -1835,6 +1835,9 @@ def summary(self, shorten=False, name_padding=35):

        nameunit = '{name} / ({units})'.format(name=self.name(),
                                               units=self.units)
+        # If all unknown and a STASH attribute exists, use it.
+        if nameunit == 'unknown / (unknown)' and 'STASH' in self.attributes:
+            nameunit = '{}'.format(self.attributes['STASH'])
        cube_header = '{nameunit!s:{length}} ({dimension})'.format(
            length=name_padding,
            nameunit=nameunit,
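
Note: a minimal sketch of the new fallback behaviour (Python, uses only the
public Cube API; the STASH string is an illustrative stand-in for a real code):

    import numpy as np
    import iris.cube

    cube = iris.cube.Cube(np.zeros((3, 4)))   # name() and units both 'unknown'
    cube.attributes['STASH'] = 'm01s16i203'   # illustrative value
    # The summary header now shows the STASH code, not 'unknown / (unknown)'.
    print(cube.summary(shorten=True))
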
4 changes: 2 additions & 2 deletions lib/iris/experimental/regrid.py
@@ -137,8 +137,8 @@ def _within_bounds(src_bounds, tgt_bounds, orderswap=False):
    extremes of the source bounds.

    """
-    min_bound = np.min(src_bounds)
-    max_bound = np.max(src_bounds)
+    min_bound = np.min(src_bounds) - 1e-14
+    max_bound = np.max(src_bounds) + 1e-14

    # Swap upper-lower is necessary.
    if orderswap is True:
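
Note: the 1e-14 padding tolerates target bounds that equal a source extreme up
to floating-point rounding. A small illustrative check (values invented; the
padding is absolute, so it matters most for coordinates of order unity):

    import numpy as np

    src_bounds = np.array([0.0, 0.5, 1.0])
    tgt = 1.0 + 5e-15                          # max bound plus rounding noise
    print(tgt <= np.max(src_bounds))           # False: spuriously out of bounds
    print(tgt <= np.max(src_bounds) + 1e-14)   # True: accepted by the new check
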
34 changes: 0 additions & 34 deletions lib/iris/plot.py
@@ -27,8 +27,6 @@

import collections
import datetime
-from functools import wraps
-import threading

import cartopy.crs as ccrs
import cartopy.mpl.geoaxes
@@ -56,26 +54,6 @@

PlotDefn = collections.namedtuple('PlotDefn', ('coords', 'transpose'))

-# Threading reentrant lock to ensure thread-safe plotting.
-_lock = threading.RLock()
-
-
-def _locker(func):
-    """
-    Decorator that ensures a thread-safe atomic operation is
-    performed by the decorated function.
-
-    Uses a shared threading reentrant lock to provide thread-safe
-    plotting by public API functions.
-
-    """
-    @wraps(func)
-    def decorated_func(*args, **kwargs):
-        with _lock:
-            result = func(*args, **kwargs)
-        return result
-    return decorated_func
-


def _get_plot_defn_custom_coords_picked(cube, coords, mode, ndims=2):
    def names(coords):
@@ -686,7 +664,6 @@ def _map_common(draw_method_name, arg_func, mode, cube, plot_defn,
    return plotfn(*new_args, **kwargs)


-@_locker
def contour(cube, *args, **kwargs):
    """
    Draws contour lines based on the given Cube.
@@ -711,7 +688,6 @@ def contour(cube, *args, **kwargs):
    return result


-@_locker
def contourf(cube, *args, **kwargs):
    """
    Draws filled contours based on the given Cube.
@@ -838,7 +814,6 @@ def _fill_orography(cube, coords, mode, vert_plot, horiz_plot, style_args):
    return result


-@_locker
def orography_at_bounds(cube, facecolor='#888888', coords=None, axes=None):
    """Plots orography defined at cell boundaries from the given Cube."""

@@ -869,7 +844,6 @@ def horiz_plot(v_coord, orography, style_args):
                           horiz_plot, style_args)


-@_locker
def orography_at_points(cube, facecolor='#888888', coords=None, axes=None):
    """Plots orography defined at sample points from the given Cube."""

@@ -891,7 +865,6 @@ def horiz_plot(v_coord, orography, style_args):
                           horiz_plot, style_args)


-@_locker
def outline(cube, coords=None, color='k', linewidth=None, axes=None):
    """
    Draws cell outlines based on the given Cube.
@@ -929,7 +902,6 @@ def outline(cube, coords=None, color='k', linewidth=None, axes=None):
    return result


-@_locker
def pcolor(cube, *args, **kwargs):
    """
    Draws a pseudocolor plot based on the given Cube.
@@ -956,7 +928,6 @@ def pcolor(cube, *args, **kwargs):
    return result


-@_locker
def pcolormesh(cube, *args, **kwargs):
    """
    Draws a pseudocolor plot based on the given Cube.
@@ -981,7 +952,6 @@ def pcolormesh(cube, *args, **kwargs):
    return result


-@_locker
def points(cube, *args, **kwargs):
    """
    Draws sample point positions based on the given Cube.
@@ -1009,7 +979,6 @@ def _scatter_args(u, v, data, *args, **kwargs):
                                *args, **kwargs)


-@_locker
def plot(*args, **kwargs):
    """
    Draws a line plot based on the given cube(s) or coordinate(s).
@@ -1054,7 +1023,6 @@ def plot(*args, **kwargs):
    return _draw_1d_from_points('plot', _plot_args, *args, **kwargs)


-@_locker
def scatter(x, y, *args, **kwargs):
    """
    Draws a scatter plot based on the given cube(s) or coordinate(s).
@@ -1090,7 +1058,6 @@ def scatter(x, y, *args, **kwargs):
show = plt.show


-@_locker
def symbols(x, y, symbols, size, axes=None, units='inches'):
    """
    Draws fixed-size symbols.
@@ -1154,7 +1121,6 @@ def symbols(x, y, symbols, size, axes=None, units='inches'):
    axes.autoscale_view()


-@_locker
def citation(text, figure=None, axes=None):
    """
    Add a text citation to a plot.
103 changes: 46 additions & 57 deletions lib/iris/tests/__init__.py
@@ -45,6 +45,7 @@
import json
import io
import logging
+import math
import os
import os.path
import shutil
@@ -183,7 +184,10 @@ def get_data_path(relative_path):
    """
    if not isinstance(relative_path, six.string_types):
        relative_path = os.path.join(*relative_path)
-    data_path = os.path.join(iris.config.TEST_DATA_DIR, relative_path)
+    test_data_dir = iris.config.TEST_DATA_DIR
+    if test_data_dir is None:
+        test_data_dir = ''
+    data_path = os.path.join(test_data_dir, relative_path)

    if _EXPORT_DATAPATHS_FILE is not None:
        _EXPORT_DATAPATHS_FILE.write(data_path + '\n')
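
Note: falling back to an empty string works because os.path.join treats '' as
a no-op prefix, returning the relative path unchanged, whereas joining onto
None raises. A tiny illustration (the path is invented):

    import os.path

    print(os.path.join('', 'some/relative/path.nc'))   # 'some/relative/path.nc'
    # os.path.join(None, 'some/relative/path.nc') would raise instead.
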
@@ -281,12 +285,11 @@ def assertCMLApproxData(self, cubes, reference_filename=None, *args,
            reference_filename = [self.get_result_path(reference_filename)]
        for i, cube in enumerate(cubes):
            fname = list(reference_filename)
-            # don't want the ".cml" for the numpy data file
+            # don't want the ".cml" for the json stats file
            if fname[-1].endswith(".cml"):
                fname[-1] = fname[-1][:-4]
-            fname[-1] += '.data.%d.npy' % i
+            fname[-1] += '.data.%d.json' % i
            self.assertCubeDataAlmostEqual(cube, fname, *args, **kwargs)
-
        self.assertCML(cubes, reference_filename, checksum=False)

    def assertCDL(self, netcdf_filename, reference_filename=None, flags='-h'):
@@ -398,32 +401,38 @@ def assertTextFile(self, source_filename, reference_filename, desc="text file"):
            diff = ''.join(difflib.unified_diff(reference_text, source_text, 'Reference', 'Test result', '', '', 0))
            self.fail("%s does not match reference file: %s\n%s" % (desc, reference_filename, diff))

-    def assertCubeDataAlmostEqual(self, cube, reference_filename, *args, **kwargs):
+    def assertCubeDataAlmostEqual(self, cube, reference_filename, *args,
+                                  **kwargs):
        reference_path = self.get_result_path(reference_filename)
        if self._check_reference_file(reference_path):
            kwargs.setdefault('err_msg', 'Reference file %s' % reference_path)
-
-            result = np.load(reference_path)
-            if isinstance(result, np.lib.npyio.NpzFile):
-                self.assertIsInstance(cube.data, ma.MaskedArray, 'Cube data was not a masked array.')
-                # Avoid comparing any non-initialised array data.
-                data = cube.data.filled()
-                np.testing.assert_array_almost_equal(data, result['data'],
-                                                     *args, **kwargs)
-                np.testing.assert_array_equal(cube.data.mask, result['mask'])
-            else:
-                np.testing.assert_array_almost_equal(cube.data, result, *args, **kwargs)
+            with open(reference_path, 'r') as reference_file:
+                stats = json.load(reference_file)
+            self.assertEqual(stats.get('shape', []), list(cube.shape))
+            self.assertEqual(stats.get('masked', False),
+                             isinstance(cube.data, ma.MaskedArray))
+            nstats = np.array((stats.get('mean', 0.), stats.get('std', 0.),
+                               stats.get('max', 0), stats.get('min', 0)))
+            if math.isnan(stats.get('mean', 0)):
+                self.assertEqual(math.isnan(stats.get('mean', 0)),
+                                 math.isnan(cube.data.mean()))
+            else:
+                cube_stats = np.array((cube.data.mean(), cube.data.std(),
+                                       cube.data.max(), cube.data.min()))
+                self.assertArrayAllClose(nstats, cube_stats, **kwargs)
        else:
            self._ensure_folder(reference_path)
            logger.warning('Creating result file: %s', reference_path)
+            masked = False
            if isinstance(cube.data, ma.MaskedArray):
-                # Avoid recording any non-initialised array data.
-                data = cube.data.filled()
-                with open(reference_path, 'wb') as reference_file:
-                    np.savez(reference_file, data=data, mask=cube.data.mask)
-            else:
-                with open(reference_path, 'wb') as reference_file:
-                    np.save(reference_file, cube.data)
+                masked = True
+            stats = {'mean': np.float_(cube.data.mean()),
+                     'std': np.float_(cube.data.std()),
+                     'max': np.float_(cube.data.max()),
+                     'min': np.float_(cube.data.min()),
+                     'shape': cube.shape, 'masked': masked}
+            with open(reference_path, 'w') as reference_file:
+                reference_file.write(json.dumps(stats))

    def assertFilesEqual(self, test_filename, reference_filename):
        reference_path = self.get_result_path(reference_filename)
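
Note: the reference file is now a small JSON document of summary statistics
rather than a full .npy array dump. An illustrative sketch of the new format
(field names from the diff above; the numbers are invented):

    import json

    stats = {'mean': 287.54, 'std': 12.31, 'max': 310.2, 'min': 241.7,
             'shape': [73, 96], 'masked': False}
    print(json.dumps(stats, indent=4, sort_keys=True))
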
@@ -666,7 +675,7 @@ def _ensure_folder(self, path):
            logger.warning('Creating folder: %s', dir_path)
            os.makedirs(dir_path)

-    def _assert_graphic(self, tol=_HAMMING_DISTANCE):
+    def check_graphic(self):
        """
        Check the hash of the current matplotlib figure matches the expected
        image hash for the current graphic test.
@@ -725,24 +734,15 @@ def _create_missing():
            figure.savefig(hash_fname)
            msg = 'Creating imagerepo entry: {} -> {}'
            print(msg.format(unique_id, uri))
-            with open(repo_fname, 'wb') as fo:
-                json.dump(repo, codecs.getwriter('utf-8')(fo), indent=4,
-                          sort_keys=True)
-
-        # TBD: Push this fix to imagehash (done!)
-        # See https://github.com/JohannesBuchner/imagehash/pull/31
-        # Now need this imagehash/master pushed to pypi ...
-        def _hex_to_hash(hexstr, hash_size=_HASH_SIZE):
-            l = []
-            count = hash_size * (hash_size // 4)
-            if len(hexstr) != count:
-                emsg = 'Expected hex string size of {}.'
-                raise ValueError(emsg.format(count))
-            for i in range(count // 2):
-                h = hexstr[i*2:i*2+2]
-                v = int("0x" + h, 16)
-                l.append([v & 2**i > 0 for i in range(8)])
-            return imagehash.ImageHash(np.array(l))
+            lock = filelock.FileLock(os.path.join(_RESULT_PATH,
+                                                  'imagerepo.lock'))
+            # The imagerepo.json file is a critical resource, so ensure
+            # thread safe read/write behaviour via platform independent
+            # file locking.
+            with lock.acquire(timeout=600):
+                with open(repo_fname, 'wb') as fo:
+                    json.dump(repo, codecs.getwriter('utf-8')(fo),
+                              indent=4, sort_keys=True)

        # Calculate the test result perceptual image hash.
        buffer = io.BytesIO()
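
Note: a standalone sketch of the locking pattern introduced above, using the
filelock package's FileLock and acquire(timeout=...) as in the diff (paths and
repo contents are illustrative):

    import json
    import filelock

    lock = filelock.FileLock('/tmp/imagerepo.lock')   # illustrative path
    repo = {'iris.tests.test_plot.TestContour.test_xy':
            ['https://example.invalid/abc.png']}
    # Only one process may rewrite the shared JSON repository at a time.
    with lock.acquire(timeout=600):
        with open('/tmp/imagerepo.json', 'w') as fo:
            json.dump(repo, fo, indent=4, sort_keys=True)
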
@@ -761,13 +761,15 @@ def _hex_to_hash(hexstr, hash_size=_HASH_SIZE):
        else:
            uris = repo[unique_id]
            # Create the expected perceptual image hashes from the uris.
-            expected = [_hex_to_hash(os.path.splitext(os.path.basename(uri))[0])
+            to_hash = imagehash.hex_to_hash
+            expected = [to_hash(os.path.splitext(os.path.basename(uri))[0],
+                                hash_size=_HASH_SIZE)
                        for uri in uris]

        # Calculate the hamming distance vector for the result hash.
        distances = [e - phash for e in expected]

-        if np.all([hd > tol for hd in distances]):
+        if np.all([hd > _HAMMING_DISTANCE for hd in distances]):
            if dev_mode:
                _create_missing()
            else:
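
Note: the tolerance test relies on imagehash's subtraction operator, which
returns the hamming distance (in bits) between two perceptual hashes. A short
sketch (requires Pillow and imagehash; filenames and threshold illustrative):

    from PIL import Image
    import imagehash

    expected = imagehash.phash(Image.open('expected.png'), hash_size=16)
    actual = imagehash.phash(Image.open('actual.png'), hash_size=16)

    distance = expected - actual   # hamming distance: small means similar
    print(distance <= 2)           # accept within an illustrative tolerance
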
@@ -788,19 +790,6 @@ def _hex_to_hash(hexstr, hash_size=_HASH_SIZE):
        finally:
            plt.close()

-    def check_graphic(self):
-        """
-        Checks that the image hash for the current matplotlib figure matches
-        the expected image hash for the current test.
-
-        """
-        fname = os.path.join(_RESULT_PATH, 'imagerepo.lock')
-        lock = filelock.FileLock(fname)
-        # The imagerepo.json file is a critical resource, so ensure thread
-        # safe read/write behaviour via platform independent file locking.
-        with lock.acquire(timeout=600):
-            self._assert_graphic()
-
    def _remove_testcase_patches(self):
        """Helper to remove per-testcase patches installed by :meth:`patch`."""
        # Remove all patches made, ignoring errors.