lib/iris/analysis/maths.py (6 changes: 4 additions & 2 deletions)
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2013, Met Office
+# (C) British Crown Copyright 2010 - 2014, Met Office
 #
 # This file is part of Iris.
 #
@@ -536,7 +536,9 @@ def _math_op_common(cube, operation_function, new_unit, in_place=False):
         new_cube = cube
         operation_function(new_cube.data, out=new_cube.data)
     else:
-        new_cube = cube.copy(data=operation_function(cube.data))
+        # use a slice to shallow copy the cube
+        new_cube = cube[:]
+        new_cube.data = operation_function(cube.data)
     iris.analysis.clear_phenomenon_identity(new_cube)
     new_cube.units = new_unit
     return new_cube
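
Reviewer note: a minimal sketch of the pattern used above, with a hypothetical ToyCube stand-in rather than the real iris.cube.Cube. Since the result array is computed fresh anyway, there is no need to deep-copy the source payload just to overwrite it; a slice-based shallow copy of the container is enough.

    import numpy as np

    class ToyCube:
        # Hypothetical stand-in for iris.cube.Cube, for illustration only.
        def __init__(self, data, units='1'):
            self.data = data
            self.units = units

        def __getitem__(self, keys):
            # Slicing returns a new container; with this change the payload
            # is shared rather than deep-copied.
            return ToyCube(self.data[keys], self.units)

    def math_op(cube, operation_function, new_unit):
        new_cube = cube[:]                             # cheap shallow copy
        new_cube.data = operation_function(cube.data)  # fresh result array
        new_cube.units = new_unit
        return new_cube

    cube = ToyCube(np.linspace(250.0, 300.0, 5), units='K')
    anomaly = math_op(cube, lambda d: d - d.mean(), new_unit='K')
    assert cube.data[0] == 250.0                       # source data untouched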
lib/iris/coords.py (4 changes: 3 additions & 1 deletion)
@@ -431,7 +431,9 @@ def copy(self, points=None, bounds=None):
             raise ValueError('If bounds are specified, points must also be '
                              'specified')
 
-        new_coord = copy.deepcopy(self)
+        new_coord = copy.copy(self)
+        new_coord.attributes = copy.deepcopy(self.attributes)
+        new_coord.coord_system = copy.deepcopy(self.coord_system)
         if points is not None:
             # Explicitly not using the points property as we don't want the
             # shape the new points to be constrained by the shape of
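
Reviewer note: a minimal sketch of the shallow-copy-plus-selective-deepcopy pattern introduced above, using a toy class rather than the real iris.coords.Coord. Only the small mutable metadata is deep-copied; the large payload is shared until explicitly replaced.

    import copy

    class ToyCoord:
        # Hypothetical stand-in for iris.coords.Coord, for illustration only.
        def __init__(self, points, attributes):
            self.points = points          # potentially large payload
            self.attributes = attributes  # small, mutable metadata

    original = ToyCoord(points=[0, 10, 20], attributes={'positive': 'up'})

    new_coord = copy.copy(original)                            # shares .points
    new_coord.attributes = copy.deepcopy(original.attributes)  # private metadata

    new_coord.attributes['positive'] = 'down'
    assert original.attributes['positive'] == 'up'   # metadata stays independent
    assert new_coord.points is original.points       # payload shared, not copied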
lib/iris/cube.py (18 changes: 6 additions & 12 deletions)
@@ -551,7 +551,7 @@ def __init__(self, data, standard_name=None, long_name=None,
 
         if data_manager is not None:
             self._data = data
-            self._data_manager = data_manager
+            self._data_manager = copy.deepcopy(data_manager)
         else:
             if isinstance(data, np.ndarray):
                 self._data = data
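
Reviewer note: the one-line change above is the general defensive-copy pattern (the sketch below uses made-up names, not Iris internals): deep-copying a mutable constructor argument keeps later internal mutation from leaking back into the caller's object.

    import copy

    class Container:
        def __init__(self, manager):
            # Defensive copy: the container may later mutate or replace its
            # manager without side effects on the object the caller passed in.
            self._manager = copy.deepcopy(manager)

    shared_manager = {'deferred': True}
    c = Container(shared_manager)
    c._manager['deferred'] = False
    assert shared_manager['deferred'] is True   # caller's object untouched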
@@ -1760,22 +1760,17 @@ def __getitem__(self, keys):
                 data = self.data[first_slice]
         else:
             if use_data_proxy:
-                data = copy.deepcopy(self._data)
+                data = self._data
                 data_manager = copy.deepcopy(self._data_manager)
             else:
-                data = copy.deepcopy(self.data)
+                data = self.data
 
         for other_slice in slice_gen:
             if use_data_proxy:
                 data, data_manager = data_manager.getitem(data, other_slice)
             else:
                 data = data[other_slice]
 
-        # We don't want a view of the numpy array, so take a copy of it if
-        # it's not our own (this applies to proxy "empty data" arrays too)
-        if not data.flags['OWNDATA']:
-            data = data.copy()
-
         # We can turn a masked array into a normal array if it's full.
         if isinstance(data, ma.core.MaskedArray):
             if ma.count_masked(data) == 0:
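
Reviewer note: the deleted block copied any array that did not own its data, i.e. any numpy view. A small plain-numpy illustration of what that flag means and of the memory sharing that indexing may now expose:

    import numpy as np

    parent = np.arange(12.0).reshape(3, 4)
    view = parent[1:, :]                 # basic slicing returns a view

    print(view.flags['OWNDATA'])         # False: the view borrows parent's buffer
    print(view.base is parent)           # True

    # Writing through the view is visible in the parent array; that sharing is
    # the trade-off accepted by dropping the unconditional copy.
    view[0, 0] = -1.0
    print(parent[1, 0])                  # -1.0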
@@ -2012,10 +2007,9 @@ def transpose(self, new_order=None):
         elif len(new_order) != self.data.ndim:
             raise ValueError('Incorrect number of dimensions.')
 
-        # The data needs to be copied, otherwise this view of the transposed
-        # data will not be contiguous. Ensure not to assign via the cube.data
-        # setter property since we are reshaping the cube payload in-place.
-        self._data = np.transpose(self.data, new_order).copy()
+        # Ensure not to assign via the cube.data setter property since we are
+        # reshaping the cube payload in-place.
+        self._data = np.transpose(self.data, new_order)
 
         dim_mapping = {src: dest for dest, src in enumerate(new_order)}
 
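
Reviewer note: for reference, a minimal numpy-only sketch of what dropping the .copy() changes. np.transpose returns a non-contiguous view that shares memory with the original array:

    import numpy as np

    data = np.arange(6.0).reshape(2, 3)
    transposed = np.transpose(data, (1, 0))

    print(transposed.flags['OWNDATA'])        # False: a view on `data`
    print(transposed.flags['C_CONTIGUOUS'])   # False: strides permuted, not copied

    # The deleted comment forced a contiguous copy here; keeping the view is
    # cheaper but means the transposed payload shares memory with `data`.
    transposed[0, 0] = 99.0
    print(data[0, 0])                         # 99.0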