Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Delete built-in cfgrib backend #7670

Merged
merged 4 commits into from
Mar 29, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion ci/requirements/all-but-dask.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ dependencies:
- bottleneck
- cartopy
- cdms2
- cfgrib
- cftime
- coveralls
- flox
Expand Down
1 change: 0 additions & 1 deletion ci/requirements/doc.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ dependencies:
- python=3.10
- bottleneck
- cartopy
- cfgrib>=0.9
- dask-core>=2022.1
- h5netcdf>=0.13
- ipykernel
Expand Down
1 change: 0 additions & 1 deletion ci/requirements/environment-py311.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ dependencies:
- bottleneck
- cartopy
# - cdms2
- cfgrib
- cftime
- dask-core
- distributed
Expand Down
1 change: 0 additions & 1 deletion ci/requirements/environment-windows-py311.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ dependencies:
- bottleneck
- cartopy
# - cdms2 # Not available on Windows
# - cfgrib # Causes Python interpreter crash on Windows: https://github.com/pydata/xarray/pull/3340
- cftime
- dask-core
- distributed
Expand Down
1 change: 0 additions & 1 deletion ci/requirements/environment-windows.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ dependencies:
- bottleneck
- cartopy
# - cdms2 # Not available on Windows
# - cfgrib # Causes Python interpreter crash on Windows: https://github.com/pydata/xarray/pull/3340
- cftime
- dask-core
- distributed
Expand Down
1 change: 0 additions & 1 deletion ci/requirements/environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ dependencies:
- bottleneck
- cartopy
- cdms2
- cfgrib
- cftime
- dask-core
- distributed
Expand Down
1 change: 0 additions & 1 deletion ci/requirements/min-all-deps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ dependencies:
- bottleneck=1.3
- cartopy=0.20
- cdms2=3.1
- cfgrib=0.9
- cftime=1.5
- coveralls
- dask-core=2022.1
Expand Down
2 changes: 0 additions & 2 deletions doc/getting-started-guide/installing.rst
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,6 @@ For netCDF and IO
other gridded raster datasets.
- `iris <https://github.com/scitools/iris>`__: for conversion to and from iris'
Cube objects
- `cfgrib <https://github.com/ecmwf/cfgrib>`__: for reading GRIB files via the
*ECMWF ecCodes* library.

For accelerating xarray
~~~~~~~~~~~~~~~~~~~~~~~
Expand Down
2 changes: 1 addition & 1 deletion doc/user-guide/io.rst
Original file line number Diff line number Diff line change
Expand Up @@ -1257,7 +1257,7 @@ GRIB format via cfgrib

Xarray supports reading GRIB files via the ECMWF cfgrib_ Python driver,
if it is installed. To open a GRIB file supply ``engine='cfgrib'``
to :py:func:`open_dataset`:
to :py:func:`open_dataset` after installing cfgrib_:

.. ipython::
:verbatim:
Expand Down
4 changes: 4 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,10 @@ Documentation
Internal Changes
~~~~~~~~~~~~~~~~

- Remove internal support for reading GRIB files through the ``cfgrib`` backend. ``cfgrib`` now uses the external
backend interface, so no existing code should break.
By `Deepak Cherian <https://github.com/dcherian>`_.

.. _whats-new.2023.03.0:

v2023.03.0 (March 22, 2023)
Expand Down
1 change: 0 additions & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,6 @@ io =
fsspec
cftime
rasterio
cfgrib
pooch
## Scitools packages & dependencies (e.g: cartopy, cf-units) can be hard to install
# scitools-iris
Expand Down
2 changes: 0 additions & 2 deletions xarray/backends/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
DataStores provide a uniform interface for saving and loading data in different
formats. They should not be used directly, but rather through Dataset objects.
"""
from xarray.backends.cfgrib_ import CfGribDataStore
from xarray.backends.common import AbstractDataStore, BackendArray, BackendEntrypoint
from xarray.backends.file_manager import (
CachingFileManager,
Expand All @@ -30,7 +29,6 @@
"BackendEntrypoint",
"FileManager",
"CachingFileManager",
"CfGribDataStore",
"DummyFileManager",
"InMemoryDataStore",
"NetCDF4DataStore",
Expand Down
13 changes: 6 additions & 7 deletions xarray/backends/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
T_NetcdfEngine = Literal["netcdf4", "scipy", "h5netcdf"]
T_Engine = Union[
T_NetcdfEngine,
Literal["pydap", "pynio", "pseudonetcdf", "cfgrib", "zarr"],
Literal["pydap", "pynio", "pseudonetcdf", "zarr"],
type[BackendEntrypoint],
str, # no nice typing support for custom backends
None,
Expand All @@ -64,7 +64,6 @@
"h5netcdf": backends.H5NetCDFStore.open,
"pynio": backends.NioDataStore,
"pseudonetcdf": backends.PseudoNetCDFDataStore.open,
"cfgrib": backends.CfGribDataStore,
"zarr": backends.ZarrStore.open_group,
}

Expand Down Expand Up @@ -387,7 +386,7 @@ def open_dataset(
ends with .gz, in which case the file is gunzipped and opened with
scipy.io.netcdf (only netCDF3 supported). Byte-strings or file-like
objects are opened by scipy.io.netcdf (netCDF3) or h5py (netCDF4/HDF).
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", "cfgrib", \
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", \
"pseudonetcdf", "zarr", None}, installed backend \
or subclass of xarray.backends.BackendEntrypoint, optional
Engine to use when reading files. If not provided, the default engine
Expand Down Expand Up @@ -479,7 +478,7 @@ def open_dataset(
relevant when using dask or another form of parallelism. By default,
appropriate locks are chosen to safely read and write files with the
currently active dask scheduler. Supported by "netcdf4", "h5netcdf",
"scipy", "pynio", "pseudonetcdf", "cfgrib".
"scipy", "pynio", "pseudonetcdf".

See engine open function for kwargs accepted by each specific engine.

Expand Down Expand Up @@ -576,7 +575,7 @@ def open_dataarray(
ends with .gz, in which case the file is gunzipped and opened with
scipy.io.netcdf (only netCDF3 supported). Byte-strings or file-like
objects are opened by scipy.io.netcdf (netCDF3) or h5py (netCDF4/HDF).
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", "cfgrib", \
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", \
"pseudonetcdf", "zarr", None}, installed backend \
or subclass of xarray.backends.BackendEntrypoint, optional
Engine to use when reading files. If not provided, the default engine
Expand Down Expand Up @@ -666,7 +665,7 @@ def open_dataarray(
relevant when using dask or another form of parallelism. By default,
appropriate locks are chosen to safely read and write files with the
currently active dask scheduler. Supported by "netcdf4", "h5netcdf",
"scipy", "pynio", "pseudonetcdf", "cfgrib".
"scipy", "pynio", "pseudonetcdf".

See engine open function for kwargs accepted by each specific engine.

Expand Down Expand Up @@ -803,7 +802,7 @@ def open_mfdataset(
If provided, call this function on each dataset prior to concatenation.
You can find the file-name from which each dataset was loaded in
``ds.encoding["source"]``.
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", "cfgrib", \
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", \
"pseudonetcdf", "zarr", None}, installed backend \
or subclass of xarray.backends.BackendEntrypoint, optional
Engine to use when reading files. If not provided, the default engine
Expand Down
148 changes: 0 additions & 148 deletions xarray/backends/cfgrib_.py

This file was deleted.

1 change: 0 additions & 1 deletion xarray/tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,6 @@ def _importorskip(
has_zarr, requires_zarr = _importorskip("zarr")
has_fsspec, requires_fsspec = _importorskip("fsspec")
has_iris, requires_iris = _importorskip("iris")
has_cfgrib, requires_cfgrib = _importorskip("cfgrib")
has_numbagg, requires_numbagg = _importorskip("numbagg")
has_seaborn, requires_seaborn = _importorskip("seaborn")
has_sparse, requires_sparse = _importorskip("sparse")
Expand Down
46 changes: 0 additions & 46 deletions xarray/tests/test_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,6 @@
has_scipy,
mock,
network,
requires_cfgrib,
requires_cftime,
requires_dask,
requires_fsspec,
Expand Down Expand Up @@ -4176,51 +4175,6 @@ def test_weakrefs(self) -> None:
assert_identical(actual, expected)


@requires_cfgrib
class TestCfGrib:
def test_read(self) -> None:
expected = {
"number": 2,
"time": 3,
"isobaricInhPa": 2,
"latitude": 3,
"longitude": 4,
}
with open_example_dataset("example.grib", engine="cfgrib") as ds:
assert ds.dims == expected
assert list(ds.data_vars) == ["z", "t"]
assert ds["z"].min() == 12660.0

def test_read_filter_by_keys(self) -> None:
kwargs = {"filter_by_keys": {"shortName": "t"}}
expected = {
"number": 2,
"time": 3,
"isobaricInhPa": 2,
"latitude": 3,
"longitude": 4,
}
with open_example_dataset(
"example.grib", engine="cfgrib", backend_kwargs=kwargs
) as ds:
assert ds.dims == expected
assert list(ds.data_vars) == ["t"]
assert ds["t"].min() == 231.0

def test_read_outer(self) -> None:
expected = {
"number": 2,
"time": 3,
"isobaricInhPa": 2,
"latitude": 2,
"longitude": 3,
}
with open_example_dataset("example.grib", engine="cfgrib") as ds:
res = ds.isel(latitude=[0, 2], longitude=[0, 1, 2])
assert res.dims == expected
assert res["t"].min() == 231.0


@requires_pseudonetcdf
@pytest.mark.filterwarnings("ignore:IOAPI_ISPH is assumed to be 6370000")
class TestPseudoNetCDFFormat:
Expand Down
Loading