Changes from all commits (40 commits)
d0bbf3b  initial ci.yml (bjlittle, Jan 12, 2022)
44a4a25  wip (bjlittle, Jan 12, 2022)
e533e28  wip (bjlittle, Jan 12, 2022)
d3bbbc8  wip (bjlittle, Jan 12, 2022)
361e564  wip (bjlittle, Jan 12, 2022)
157e080  wip (bjlittle, Jan 12, 2022)
40efc7e  wip (bjlittle, Jan 12, 2022)
62db680  wip (bjlittle, Jan 12, 2022)
6ff1321  wip (bjlittle, Jan 12, 2022)
c1ea1ef  wip (bjlittle, Jan 12, 2022)
ebc8198  wip (bjlittle, Jan 12, 2022)
f349ead  wip (bjlittle, Jan 12, 2022)
586544e  wip (bjlittle, Jan 12, 2022)
2e6d158  wip (bjlittle, Jan 12, 2022)
8967279  wip (bjlittle, Jan 12, 2022)
cb27046  wip (bjlittle, Jan 13, 2022)
62b004a  wip (bjlittle, Jan 13, 2022)
4a482a2  wip (bjlittle, Jan 13, 2022)
187aa25  wip (bjlittle, Jan 13, 2022)
d635fb1  wip (bjlittle, Jan 13, 2022)
22de88a  wip (bjlittle, Jan 13, 2022)
4d1463c  wip (bjlittle, Jan 13, 2022)
9d77f50  wip (bjlittle, Jan 13, 2022)
396a823  wip (bjlittle, Jan 13, 2022)
ef91a43  wip (bjlittle, Jan 13, 2022)
545ccc5  wip (bjlittle, Jan 13, 2022)
841ddbd  wip (bjlittle, Jan 13, 2022)
e641630  wip (bjlittle, Jan 13, 2022)
7b22c31  update py38 lock (bjlittle, Jan 13, 2022)
cf49d61  no lock (bjlittle, Jan 13, 2022)
a2a599e  try py39 (bjlittle, Jan 13, 2022)
1ae3f22  trajectory test only (bjlittle, Jan 13, 2022)
061913f  py38 nearest (bjlittle, Jan 13, 2022)
c406302  float32 (bjlittle, Jan 13, 2022)
f6c2924  ubuntu 18.04 (bjlittle, Jan 13, 2022)
c12827e  18.04 (bjlittle, Jan 13, 2022)
9faf2e6  revert test (bjlittle, Jan 13, 2022)
fdb7629  no method (bjlittle, Jan 13, 2022)
aa9a04b  [pre-commit.ci] pre-commit autoupdate (pre-commit-ci[bot], Feb 14, 2022)
537397a  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Feb 14, 2022)
100 changes: 100 additions & 0 deletions .github/workflows/ci.yml
@@ -0,0 +1,100 @@
+# reference:
+# - https://github.com/actions/cache
+# - https://github.com/actions/checkout
+# - https://github.com/marketplace/actions/setup-miniconda
+
+name: CI
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+    branches:
+      - main
+
+jobs:
+  tests:
+    name: ${{ matrix.os }} ${{ matrix.python-version }}
+    runs-on: ${{ matrix.os }}
+    defaults:
+      run:
+        shell: bash -l {0}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: ["ubuntu-18.04"]
+        python-version: ["3.8"]
+        include:
+          - os: ubuntu-18.04
+            lock-file: requirements/ci/nox.lock/py38-linux-64.lock
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Environment Variables
+        run: |
+          echo "IRIS_TEST_DATA_VERSION=2.5" >> $GITHUB_ENV
+
+      - name: Cache Iris Test Data
+        uses: actions/cache@v2
+        env:
+          CACHE_BUILD: 0
+        with:
+          path: ~/iris-test-data
+          key: ${{ runner.os }}-iris-test-data-v${{ env.IRIS_TEST_DATA_VERSION }}-build${{ env.CACHE_BUILD }}
+
+      - name: Download Iris Test Data
+        run: |
+          if [[ "$(cat ${HOME}/iris-test-data/version.txt)" != ${{ env.IRIS_TEST_DATA_VERSION }} ]]
+          then
+            wget --quiet https://github.com/SciTools/iris-test-data/archive/v${{ env.IRIS_TEST_DATA_VERSION }}.zip -O iris-test-data.zip
+            unzip -q iris-test-data.zip
+            mv iris-test-data-${{ env.IRIS_TEST_DATA_VERSION }} ${HOME}/iris-test-data
+          fi
+
+      - name: Cache Conda Packages
+        uses: actions/cache@v2
+        env:
+          CACHE_BUILD: 0
+          LOCK_FILE: ${{ matrix.lock-file }}
+        with:
+          path: ~/conda_pkgs_dir
+          key: ${{ runner.os }}-conda-py${{ matrix.python-version }}-build${{ env.CACHE_BUILD }}-${{ hashFiles(env.LOCK_FILE) }}
+
+      - name: Configure Conda Environment
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          environment-file: ${{ matrix.lock-file }}
+          miniforge-version: latest
+          use-mamba: true
+          activate-environment: iris-tests
+          auto-update-conda: false
+          use-only-tar-bz2: true
+
+      - name: Conda Details
+        run: |
+          conda info
+          conda list
+
+      - name: Install Iris
+        run: |
+          python -m pip install --no-deps -e .
+
+      - name: Configure Iris
+        env:
+          SITE_CFG: lib/iris/etc/site.cfg
+        run: |
+          echo "[Resources]" >> ${SITE_CFG}
+          echo "test_data_dir = ${HOME}/iris-test-data/test_data" >> ${SITE_CFG}
+          echo "doc_dir = ${GITHUB_WORKSPACE}/docs" >> ${SITE_CFG}
+          cat ${SITE_CFG}
+
+      - name: Import Iris
+        run: |
+          python -c "import iris; print(f'Installed Iris v{iris.__version__}')"
+
+      - name: Test Iris
+        run: |
+          #python -m iris.tests.runner --default-tests --system-tests
+          python lib/iris/tests/integration/test_trajectory.py TestTriPolar.test_tri_polar
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -29,7 +29,7 @@ repos:
       - id: no-commit-to-branch

   - repo: https://github.com/psf/black
-    rev: 21.12b0
+    rev: 22.1.0
     hooks:
       - id: black
         pass_filenames: false
@@ -50,14 +50,14 @@ repos:
         args: [--filter-files]

   - repo: https://github.com/asottile/blacken-docs
-    rev: v1.12.0
+    rev: v1.12.1
     hooks:
       - id: blacken-docs
         types: [file, rst]
         additional_dependencies: [black==21.6b0]

   - repo: https://github.com/aio-libs/sort-all
-    rev: v1.1.0
+    rev: v1.2.0
     hooks:
       - id: sort-all
         types: [file, python]
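The black bump to 22.1.0 above is what drives the power-operator whitespace churn in the remaining files of this diff: black's 22.1.0 stable style hugs ** when both operands are simple (names, literals, attribute access) and keeps the spaces otherwise. A minimal illustrative Python snippet of that behaviour (assumed formatter behaviour, not part of the PR):

# Illustrative only, not part of the PR diff.
x = 4.2
y = x**2 + 3**0.5                   # simple operands: black 22.1.0 removes the spaces
z = (x + 1) ** 2                    # compound operand: the spaces are kept
magnitude = (x**2 + y**2) ** 0.5    # mirrors the gallery changes below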
2 changes: 1 addition & 1 deletion benchmarks/benchmarks/plot.py
@@ -22,7 +22,7 @@ def setup(self):
         # Should generate 10 distinct contours, regardless of dim size.
         dim_size = int(ARTIFICIAL_DIM_SIZE / 5)
         repeat_number = int(dim_size / 10)
-        repeat_range = range(int((dim_size ** 2) / repeat_number))
+        repeat_range = range(int((dim_size**2) / repeat_number))
         data = np.repeat(repeat_range, repeat_number)
         data = data.reshape((dim_size,) * 2)

4 changes: 2 additions & 2 deletions docs/gallery_code/meteorology/plot_wind_barbs.py
@@ -30,7 +30,7 @@ def main():

     # To illustrate the full range of barbs, scale the wind speed up to pretend
     # that a storm is passing over
-    magnitude = (uwind ** 2 + vwind ** 2) ** 0.5
+    magnitude = (uwind**2 + vwind**2) ** 0.5
     magnitude.convert_units("knot")
     max_speed = magnitude.collapsed(
         ("latitude", "longitude"), iris.analysis.MAX
@@ -41,7 +41,7 @@ def main():
     vwind = vwind / max_speed * max_desired

     # Create a cube containing the wind speed
-    windspeed = (uwind ** 2 + vwind ** 2) ** 0.5
+    windspeed = (uwind**2 + vwind**2) ** 0.5
     windspeed.rename("windspeed")
     windspeed.convert_units("knot")

2 changes: 1 addition & 1 deletion docs/gallery_code/meteorology/plot_wind_speed.py
@@ -27,7 +27,7 @@ def main():
     vwind = iris.load_cube(infile, "y_wind")

     # Create a cube containing the wind speed.
-    windspeed = (uwind ** 2 + vwind ** 2) ** 0.5
+    windspeed = (uwind**2 + vwind**2) ** 0.5
     windspeed.rename("windspeed")

     # Plot the wind speed as a contour plot.
2 changes: 1 addition & 1 deletion lib/iris/analysis/__init__.py
@@ -1394,7 +1394,7 @@ def _lazy_rms(array, axis, **kwargs):
     # all. Thus trying to use this aggregator with weights will currently
     # raise an error in dask due to the unexpected keyword `weights`,
     # rather than silently returning the wrong answer.
-    return da.sqrt(da.mean(array ** 2, axis=axis, **kwargs))
+    return da.sqrt(da.mean(array**2, axis=axis, **kwargs))


 @_build_dask_mdtol_function
2 changes: 1 addition & 1 deletion lib/iris/analysis/_grid_angles.py
@@ -120,7 +120,7 @@ def _angle(p, q, r):
     mid_lons = np.deg2rad(q[0])

     pr = _3d_xyz_from_latlon(r[0], r[1]) - _3d_xyz_from_latlon(p[0], p[1])
-    pr_norm = np.sqrt(np.sum(pr ** 2, axis=0))
+    pr_norm = np.sqrt(np.sum(pr**2, axis=0))
     pr_top = pr[1] * np.cos(mid_lons) - pr[0] * np.sin(mid_lons)

     index = pr_norm == 0
2 changes: 1 addition & 1 deletion lib/iris/analysis/_scipy_interpolate.py
@@ -229,7 +229,7 @@ def compute_interp_weights(self, xi, method=None):
         xi_shape, method, indices, norm_distances, out_of_bounds = prepared

         # Allocate arrays for describing the sparse matrix.
-        n_src_values_per_result_value = 2 ** ndim
+        n_src_values_per_result_value = 2**ndim
         n_result_values = len(indices[0])
         n_non_zero = n_result_values * n_src_values_per_result_value
         weights = np.ones(n_non_zero, dtype=norm_distances[0].dtype)
12 changes: 4 additions & 8 deletions lib/iris/analysis/calculus.py
@@ -629,14 +629,10 @@ def curl(i_cube, j_cube, k_cube=None):
     # (d/dtheta (i_cube * sin(lat)) - d_j_cube_dphi)
     # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta)
     # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube)
-    if (
-        y_coord.name()
-        not in [
-            "latitude",
-            "grid_latitude",
-        ]
-        or x_coord.name() not in ["longitude", "grid_longitude"]
-    ):
+    if y_coord.name() not in [
+        "latitude",
+        "grid_latitude",
+    ] or x_coord.name() not in ["longitude", "grid_longitude"]:
         raise ValueError(
             "Expecting latitude as the y coord and "
             "longitude as the x coord for spherical curl."
6 changes: 3 additions & 3 deletions lib/iris/analysis/cartography.py
@@ -335,7 +335,7 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth):
         raise ValueError("Bounds must be [n,2] array")

     # fill in a new array of areas
-    radius_sqr = radius_of_earth ** 2
+    radius_sqr = radius_of_earth**2
     radian_lat_64 = radian_lat_bounds.astype(np.float64)
     radian_lon_64 = radian_lon_bounds.astype(np.float64)

@@ -1010,8 +1010,8 @@ def _transform_distance_vectors_tolerance_mask(
     # Squared magnitudes should be equal to one within acceptable tolerance.
     # A value of atol=2e-3 is used, which corresponds to a change in magnitude
     # of approximately 0.1%.
-    sqmag_1_0 = u_one_t ** 2 + v_zero_t ** 2
-    sqmag_0_1 = u_zero_t ** 2 + v_one_t ** 2
+    sqmag_1_0 = u_one_t**2 + v_zero_t**2
+    sqmag_0_1 = u_zero_t**2 + v_one_t**2
     mask = np.logical_not(
         np.logical_and(
             np.isclose(sqmag_1_0, ones, atol=2e-3),
2 changes: 1 addition & 1 deletion lib/iris/analysis/maths.py
@@ -514,7 +514,7 @@ def power(data, out=None):
     return _math_op_common(
         cube,
         power,
-        cube.units ** exponent,
+        cube.units**exponent,
         new_dtype=new_dtype,
         in_place=in_place,
     )
4 changes: 2 additions & 2 deletions lib/iris/analysis/stats.py
@@ -168,10 +168,10 @@ def _ones_like(cube):
     covar = (s1 * s2).collapsed(
         corr_coords, iris.analysis.SUM, weights=weights_1, mdtol=mdtol
     )
-    var_1 = (s1 ** 2).collapsed(
+    var_1 = (s1**2).collapsed(
         corr_coords, iris.analysis.SUM, weights=weights_1
     )
-    var_2 = (s2 ** 2).collapsed(
+    var_2 = (s2**2).collapsed(
         corr_coords, iris.analysis.SUM, weights=weights_2
     )

4 changes: 2 additions & 2 deletions lib/iris/fileformats/netcdf.py
@@ -2736,9 +2736,9 @@ def _create_cf_data_variable(
         cmin, cmax = _co_realise_lazy_arrays([cmin, cmax])
         n = dtype.itemsize * 8
         if masked:
-            scale_factor = (cmax - cmin) / (2 ** n - 2)
+            scale_factor = (cmax - cmin) / (2**n - 2)
         else:
-            scale_factor = (cmax - cmin) / (2 ** n - 1)
+            scale_factor = (cmax - cmin) / (2**n - 1)
         if dtype.kind == "u":
             add_offset = cmin
         elif dtype.kind == "i":
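For context on the hunk above (only the ** spacing changes here): the scale_factor arithmetic packs data spanning [cmin, cmax] into an n-bit integer variable, and when the data is masked it presumably reserves one integer code point for the fill value, hence 2**n - 2 rather than 2**n - 1. A minimal standalone sketch of that arithmetic (assumed interpretation, not Iris code):

import numpy as np

# Standalone sketch of the packing arithmetic shown above (not Iris code).
cmin, cmax = 0.0, 100.0
dtype = np.dtype(np.int16)
n = dtype.itemsize * 8                         # 16 bits

scale_masked = (cmax - cmin) / (2**n - 2)      # one code point kept free for the fill value
scale_unmasked = (cmax - cmin) / (2**n - 1)    # full integer range usable

print(scale_masked, scale_unmasked)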
4 changes: 2 additions & 2 deletions lib/iris/fileformats/pp.py
@@ -403,7 +403,7 @@ def _calculate_str_value_from_value(self):

     def _calculate_value_from_str_value(self):
         self._value = np.sum(
-            [10 ** i * val for i, val in enumerate(self._strvalue)]
+            [10**i * val for i, val in enumerate(self._strvalue)]
         )

     def __len__(self):
@@ -418,7 +418,7 @@ def __getitem__(self, key):
         # if the key returns a list of values, then combine them together
         # to an integer
         if isinstance(val, list):
-            val = sum([10 ** i * val for i, val in enumerate(val)])
+            val = sum([10**i * val for i, val in enumerate(val)])

         return val

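The comprehension touched in both hunks above rebuilds an integer from a list of digits, with index i supplying the 10**i place, i.e. the list is read least-significant digit first. A tiny worked example (illustrative only, not Iris code):

digits = [4, 3, 2, 1]                                     # least-significant digit first
value = sum(10**i * val for i, val in enumerate(digits))
print(value)                                              # 1234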
6 changes: 3 additions & 3 deletions lib/iris/tests/integration/test_netcdf.py
@@ -416,7 +416,7 @@ def setUp(self):
         levels.units = "centimeters"
         levels.positive = "down"
         levels.axis = "Z"
-        levels[:] = np.linspace(0, 10 ** 5, 3)
+        levels[:] = np.linspace(0, 10**5, 3)

         volcello.id = "volcello"
         volcello.out_name = "volcello"
@@ -507,9 +507,9 @@ def _get_scale_factor_add_offset(cube, datatype):
     else:
         masked = False
     if masked:
-        scale_factor = (cmax - cmin) / (2 ** n - 2)
+        scale_factor = (cmax - cmin) / (2**n - 2)
     else:
-        scale_factor = (cmax - cmin) / (2 ** n - 1)
+        scale_factor = (cmax - cmin) / (2**n - 1)
     if dt.kind == "u":
         add_offset = cmin
     elif dt.kind == "i":
3 changes: 3 additions & 0 deletions lib/iris/tests/integration/test_trajectory.py
@@ -217,6 +217,9 @@ def setUp(self):
         self.cube = cube
         # define a latitude trajectory (put coords in a different order
         # to the cube, just to be awkward)
+        # dtype = np.float32
+        # latitudes = np.array(list(range(-90, 90, 2)), dtype=dtype)
+        # longitudes = np.array([-90] * len(latitudes), dtype=dtype)
         latitudes = list(range(-90, 90, 2))
         longitudes = [-90] * len(latitudes)
         self.sample_points = [