22 changes: 0 additions & 22 deletions .cirrus.yml
@@ -16,7 +16,6 @@ container:

env:
# Skip specific tasks by name. Set to a non-empty string to skip.
SKIP_LINT_TASK: ""
SKIP_TEST_TASK: ""
SKIP_BENCHMARK_TASK: ""
# Maximum cache period (in weeks) before forcing a new cache upload.
@@ -81,27 +80,6 @@ LINUX_CONDA_TEMPLATE: &LINUX_CONDA_TEMPLATE
- conda install --quiet --name base ${CONDA_CACHE_PACKAGES}


#
# Linting
#
lint_task:
only_if: ${SKIP_LINT_TASK} == ""
auto_cancellation: true
name: "${CIRRUS_OS}: flake8 and black"
pip_cache:
folder: ~/.cache/pip
fingerprint_script:
- echo "${CIRRUS_TASK_NAME} py${PYTHON_VERSION}"
- echo "${PIP_CACHE_PACKAGES}"
- echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${PIP_CACHE_BUILD}"
lint_script:
- pip list
- python -m pip install --retries 3 --upgrade ${PIP_CACHE_PACKAGES}
- pip list
- nox --session flake8
- nox --session black


#
# Testing (Linux)
#
4 changes: 4 additions & 0 deletions .pre-commit-config.yaml
@@ -29,3 +29,7 @@ repos:
# Run flake8.
- id: flake8
args: [--config=./.flake8]
additional_dependencies: [
'flake8-docstrings==1.6.0',
'flake8-import-order==0.18.2',
]
8 changes: 3 additions & 5 deletions benchmarks/asv_delegated_conda.py
@@ -1,8 +1,4 @@
"""
ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda`
subclass that manages the Conda environment via custom user scripts.

"""
"""ASV plug-in - managing Conda environment via custom user scripts."""

from os import environ
from os.path import getmtime
@@ -41,6 +37,8 @@ def __init__(
tagged_env_vars: dict,
) -> None:
"""
Create the instance.

Parameters
----------
conf : Config instance
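For readers unfamiliar with ASV environment plug-ins, here is a minimal sketch of the delegation idea the module docstring describes. It is an illustration only, not this repository's implementation: the class name, the `tool_name` value, and the `DELEGATED_ENV_COMMANDS` variable are assumptions; only `asv.plugins.conda.Conda`, the `_setup` hook, and `from os import environ` come from ASV and the file above.

```python
# Illustrative sketch only - not the repository's actual plug-in.
import subprocess
from os import environ

from asv.plugins.conda import Conda


class DelegatedConda(Conda):
    """Hand Conda environment management over to user-supplied scripts."""

    tool_name = "delegated-conda"  # hypothetical name for asv.conf.json

    def _setup(self):
        # Rather than letting ASV build the environment itself, run a
        # user-provided shell snippet (hypothetical env-var name).
        commands = environ.get("DELEGATED_ENV_COMMANDS")
        if commands:
            subprocess.run(["sh", "-c", commands], check=True)
```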
8 changes: 4 additions & 4 deletions benchmarks/benchmarks/__init__.py
@@ -1,12 +1,12 @@
"""Benchmark tests for iris-esmf-regrid"""
"""Benchmark tests for iris-esmf-regrid."""


from os import environ


def disable_repeat_between_setup(benchmark_object):
"""
Decorator for benchmarks where object persistence would be inappropriate.
Decorate benchmarks where object persistence would be inappropriate.

E.g:
* Data is realised during testing.
@@ -32,7 +32,7 @@ def disable_repeat_between_setup(benchmark_object):

def skip_benchmark(benchmark_object):
"""
Decorator for benchmarks skipping benchmarks.
Decorate benchmarks to be skipped.

Simply doesn't return the object.

@@ -49,7 +49,7 @@ def skip_benchmark(benchmark_object):

def on_demand_benchmark(benchmark_object):
"""
Decorator. Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set.
Decorate benchmark(s) that are disabled unless the ON_DEMAND_BENCHMARKS env var is set.

For benchmarks that, for whatever reason, should not be run by default.
E.g:
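The three docstrings above describe decorator behaviours rather than showing them. As a rough, hedged sketch (not necessarily this repository's exact code; the `repeat` values are illustrative and the env-var name follows the docstring), such ASV decorators are typically written like this:

```python
from os import environ


def disable_repeat_between_setup(benchmark_object):
    """Decorate benchmarks where object persistence would be inappropriate."""
    # ASV honours `number` and `repeat` attributes on a benchmark:
    # re-run setup() for every repeat and time a single call each time,
    # so no object state persists between measurements.
    benchmark_object.number = 1
    benchmark_object.repeat = (2, 2, 0.0)  # (min_repeat, max_repeat, max_time)
    return benchmark_object


def skip_benchmark(benchmark_object):
    """Decorate benchmarks to be skipped - simply don't return the object."""
    # Returning None means ASV never discovers the benchmark.


def on_demand_benchmark(benchmark_object):
    """Return the benchmark only if the ON_DEMAND_BENCHMARKS env var is set."""
    if "ON_DEMAND_BENCHMARKS" in environ:
        return benchmark_object
```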
53 changes: 46 additions & 7 deletions benchmarks/benchmarks/esmf_regridder/__init__.py
@@ -3,28 +3,25 @@
import os
from pathlib import Path

import numpy as np
import dask.array as da
import iris
from iris.cube import Cube
from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD
import numpy as np

from esmf_regrid.esmf_regridder import GridInfo
from esmf_regrid.experimental.unstructured_scheme import (
GridToMeshESMFRegridder,
MeshToGridESMFRegridder,
)
from esmf_regrid.schemes import ESMFAreaWeightedRegridder

from ..generate_data import _grid_cube, _gridlike_mesh_cube


def _make_small_grid_args():
"""
Not importing the one in test_GridInfo - if that changes, these benchmarks
would 'invisibly' change too.
# Not importing the one in test_GridInfo - if that changes, these benchmarks
# would 'invisibly' change too.

"""
small_x = 2
small_y = 3
small_grid_lon = np.array(range(small_x)) / (small_x + 1)
@@ -41,23 +38,29 @@ def _make_small_grid_args():


class TimeGridInfo:
"""Basic benchmarking for :class:~esmf_regrid.esmf_regridder.GridInfo`."""

def setup(self):
"""ASV setup method."""
lon, lat, lon_bounds, lat_bounds = _make_small_grid_args()
self.grid = GridInfo(lon, lat, lon_bounds, lat_bounds)

def time_make_grid(self):
"""Basic test for :meth:`~esmf_regrid.esmf_regridder.GridInfo.make_esmf_field`."""
"""Benchmark :meth:`~esmf_regrid.esmf_regridder.GridInfo.make_esmf_field` time."""
esmf_grid = self.grid.make_esmf_field()
esmf_grid.data[:] = 0

time_make_grid.version = 1


class MultiGridCompare:
"""Mixin to prepare common arguments for benchmarking between different grid sizes."""

params = ["similar", "large_source", "large_target", "mixed"]
param_names = ["source/target difference"]

def get_args(self, tp):
"""Prepare common arguments."""
lon_bounds = (-180, 180)
lat_bounds = (-90, 90)
n_lons_src = 20
@@ -86,7 +89,10 @@ def get_args(self, tp):


class TimeRegridding(MultiGridCompare):
"""Benchmarks for :class:`~esmf_regrid.esmf_regrid.schemes.ESMFAreaWeightedRegridder`."""

def setup(self, tp):
"""ASV setup method."""
(
lon_bounds,
lat_bounds,
@@ -117,14 +123,19 @@ def setup(self, tp):
self.tgt = tgt

def time_prepare_regridding(self, tp):
"""Benchmark the prepare time."""
_ = self.regrid_class(self.src, self.tgt)

def time_perform_regridding(self, tp):
"""Benchmark the perform time."""
_ = self.regridder(self.src)


class TimeLazyRegridding:
"""Lazy benchmarks for :class:`~esmf_regrid.esmf_regrid.schemes.ESMFAreaWeightedRegridder`."""

def setup_cache(self):
"""ASV setup_cache method."""
SYNTH_DATA_DIR = Path().cwd() / "tmp_data"
SYNTH_DATA_DIR.mkdir(exist_ok=True)
file = str(SYNTH_DATA_DIR.joinpath("chunked_cube.nc"))
@@ -159,25 +170,31 @@ def setup_cache(self):
return regridder, file

def setup(self, cache):
"""ASV setup method."""
regridder, file = cache
self.src = iris.load_cube(file)
cube = iris.load_cube(file)
self.result = regridder(cube)

def time_lazy_regridding(self, cache):
"""Benchmark the construction time of the lazy regridding operation."""
assert self.src.has_lazy_data()
regridder, _ = cache
_ = regridder(self.src)

def time_regridding_realisation(self, cache):
"""Benchmark the final regridding operation time."""
# Don't touch result.data - permanent realisation plays badly with
# ASV's re-run strategy.
assert self.result.has_lazy_data()
self.result.core_data().compute()
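The "Don't touch result.data" comment above is the crux of these lazy benchmarks; a small self-contained illustration of the distinction (arbitrary array shape, assuming iris and dask are installed):

```python
import dask.array as da
from iris.cube import Cube

cube = Cube(da.zeros((1000, 1000), chunks=(100, 100)))
assert cube.has_lazy_data()

# core_data().compute() evaluates the lazy array but does NOT cache the
# result on the cube, so every ASV repeat still times the computation.
_ = cube.core_data().compute()
assert cube.has_lazy_data()

# .data realises AND permanently caches the array on the cube; later
# repeats would then be timing an already-realised cube.
_ = cube.data
assert not cube.has_lazy_data()
```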


class TimeMeshToGridRegridding(TimeRegridding):
"""Benchmarks for :class:`~esmf_regrid.esmf_regrid.schemes.MeshToGridESMFRegridder`."""

def setup(self, tp):
"""ASV setup method."""
(
lon_bounds,
lat_bounds,
@@ -209,7 +226,10 @@ def setup(self, tp):


class TimeLazyMeshToGridRegridding:
"""Lazy benchmarks for :class:`~esmf_regrid.esmf_regrid.schemes.MeshToGridESMFRegridder`."""

def setup_cache(self):
"""ASV setup_cache method."""
SYNTH_DATA_DIR = Path().cwd() / "tmp_data"
SYNTH_DATA_DIR.mkdir(exist_ok=True)
file = str(SYNTH_DATA_DIR.joinpath("chunked_cube.nc"))
@@ -239,26 +259,32 @@ def setup_cache(self):
return regridder, file

def setup(self, cache):
"""ASV setup method."""
regridder, file = cache
with PARSE_UGRID_ON_LOAD.context():
self.src = iris.load_cube(file)
cube = iris.load_cube(file)
self.result = regridder(cube)

def time_lazy_regridding(self, cache):
"""Benchmark the construction time of the lazy regridding operation."""
assert self.src.has_lazy_data()
regridder, _ = cache
_ = regridder(self.src)

def time_regridding_realisation(self, cache):
"""Benchmark the final regridding operation time."""
# Don't touch result.data - permanent realisation plays badly with
# ASV's re-run strategy.
assert self.result.has_lazy_data()
self.result.core_data().compute()


class TimeGridToMeshRegridding(TimeRegridding):
"""Benchmarks for :class:`~esmf_regrid.esmf_regrid.schemes.GridToMeshESMFRegridder`."""

def setup(self, tp):
"""ASV setup method."""
(
lon_bounds,
lat_bounds,
@@ -290,7 +316,10 @@ def setup(self, tp):


class TimeLazyGridToMeshRegridding:
"""Lazy benchmarks for :class:`~esmf_regrid.esmf_regrid.schemes.GridToMeshESMFRegridder`."""

def setup_cache(self):
"""ASV setup_cache method."""
SYNTH_DATA_DIR = Path().cwd() / "tmp_data"
SYNTH_DATA_DIR.mkdir(exist_ok=True)
file = str(SYNTH_DATA_DIR.joinpath("chunked_cube.nc"))
@@ -317,28 +346,34 @@ def setup_cache(self):
return regridder, file

def setup(self, cache):
"""ASV setup method."""
regridder, file = cache
self.src = iris.load_cube(file)
cube = iris.load_cube(file)
self.result = regridder(cube)

def time_lazy_regridding(self, cache):
"""Benchmark the construction time of the lazy regridding operation."""
assert self.src.has_lazy_data()
regridder, _ = cache
_ = regridder(self.src)

def time_regridding_realisation(self, cache):
"""Benchmark the final regridding operation time."""
# Don't touch result.data - permanent realisation plays badly with
# ASV's re-run strategy.
assert self.result.has_lazy_data()
self.result.core_data().compute()


class TimeRegridderIO(MultiGridCompare):
"""Benchmarks for regridder saving and loading."""

params = [MultiGridCompare.params, ["mesh_to_grid", "grid_to_mesh"]]
param_names = MultiGridCompare.param_names + ["regridder type"]

def setup_cache(self):
"""ASV setup_cache method."""
from esmf_regrid.experimental.io import save_regridder

SYNTH_DATA_DIR = Path().cwd() / "tmp_data"
@@ -395,6 +430,7 @@ def setup_cache(self):
return file_dict

def setup(self, file_dict, tp, rgt):
"""ASV setup method."""
from esmf_regrid.experimental.io import load_regridder, save_regridder

self.load_regridder = load_regridder
@@ -405,11 +441,14 @@ def setup(self, file_dict, tp, rgt):
self.regridder = load_regridder(self.source_file)

def teardown(self, _, tp, rgt):
"""ASV teardown method."""
if os.path.exists(self.destination_file):
os.remove(self.destination_file)

def time_save(self, _, tp, rgt):
"""Benchmark the saving time."""
self.save_regridder(self.regridder, self.destination_file)

def time_load(self, _, tp, rgt):
"""Benchmark the loading time."""
_ = self.load_regridder(self.source_file)
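For context on what `time_save` and `time_load` exercise, a hedged usage sketch of the save/load round trip follows. The cubes are assumed to exist already (e.g. built via the `_grid_cube`/`_gridlike_mesh_cube` helpers as in `setup_cache` above) and the file path is hypothetical:

```python
from esmf_regrid.experimental.io import load_regridder, save_regridder
from esmf_regrid.experimental.unstructured_scheme import GridToMeshESMFRegridder

# `src_grid_cube` (lat/lon grid) and `tgt_mesh_cube` (UGRID mesh) are assumed
# to have been created beforehand, as in TimeRegridderIO.setup_cache.
regridder = GridToMeshESMFRegridder(src_grid_cube, tgt_mesh_cube)

save_regridder(regridder, "my_regridder.nc")  # what time_save measures
reloaded = load_regridder("my_regridder.nc")  # what time_load measures
result = reloaded(src_grid_cube)              # the reloaded regridder works as before
```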