
fix(pyspark): make has_operation method a @classmethod
cpcloud committed Jul 28, 2023
1 parent 3c558cd commit c1b7dbc
Showing 2 changed files with 20 additions and 2 deletions.
1 change: 1 addition & 0 deletions ibis/backends/pyspark/__init__.py
@@ -561,6 +561,7 @@ def compute_stats(
         name = self._fully_qualified_name(name, database)
         return self.raw_sql(f"ANALYZE TABLE {name} COMPUTE STATISTICS{maybe_noscan}")

+    @classmethod
     def has_operation(cls, operation: type[ops.Value]) -> bool:
         return operation in PySparkExprTranslator._registry

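For context, the decorator makes the support check available on the backend class itself, so no connection (and, for PySpark, no Spark session) is needed. A minimal usage sketch, assuming illustrative operation choices (not part of the commit):

import ibis.expr.operations as ops
from ibis.backends.pyspark import Backend

# Class-level check: no Backend instance or SparkSession is required.
print(Backend.has_operation(ops.StringLength))  # True if the op is in the translator registry
print(Backend.has_operation(ops.GeoDistance))   # expected False: no geospatial support registered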
21 changes: 19 additions & 2 deletions ibis/backends/tests/test_client.py
@@ -1013,8 +1013,25 @@ def test_has_operation_no_geo(con, op):
     they're excluded here, skipping the few backends that explicitly do
     support them.
     """
-    for op in [ops.GeoDistance, ops.GeoAsText, ops.GeoUnaryUnion]:
-        assert not con.has_operation(op)
+    assert not con.has_operation(op)


+@pytest.mark.parametrize(
+    ("module_name", "op"),
+    [
+        param(backend, obj, marks=getattr(mark, backend), id=f"{backend}-{name}")
+        for name, obj in sorted(inspect.getmembers(builtins), key=itemgetter(0))
+        for backend in sorted(ALL_BACKENDS)
+        # filter out builtins that are types, except for tuples on ClickHouse
+        # because tuples are used to represent lists of expressions
+        if isinstance(obj, type)
+        if (obj != tuple or backend != "clickhouse")
+        if (backend != "pyspark" or vparse(pd.__version__) < vparse("2"))
+    ],
+)
+def test_has_operation_no_builtins(module_name, op):
+    mod = importlib.import_module(f"ibis.backends.{module_name}")
+    assert not mod.Backend.has_operation(op)
+
+
 def test_get_backend(con, alltypes, monkeypatch):
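The parametrization above builds one test case per (backend, builtin) pair: it keeps only the builtins that are types (since has_operation expects an operation class), skips tuple on ClickHouse because tuples there represent lists of expressions, and skips PySpark entirely when pandas >= 2 is installed. A standalone sketch of that cross-product-with-filters pattern, using a hypothetical BACKENDS list in place of ALL_BACKENDS:

import builtins
import inspect
from operator import itemgetter

BACKENDS = ["clickhouse", "duckdb", "pyspark"]  # illustrative stand-in for ALL_BACKENDS

cases = [
    (backend, obj)
    for name, obj in sorted(inspect.getmembers(builtins), key=itemgetter(0))
    for backend in sorted(BACKENDS)
    if isinstance(obj, type)                       # only builtin types (int, str, ValueError, ...)
    if (obj != tuple or backend != "clickhouse")   # tuple stays testable everywhere but ClickHouse
]

# Each (backend, builtin_type) pair would become one pytest case via pytest.param(...),
# with an id like "duckdb-int".
print(len(cases), cases[:2])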
