Skip to content

Commit 2e2f119

Browse files
authored
Refactor: Remove duplicate _has_json_arrow_type from loader.py (#2221)
1 parent 3d8b17f commit 2e2f119

File tree

1 file changed

+3
-18
lines changed

1 file changed

+3
-18
lines changed

bigframes/session/loader.py

Lines changed: 3 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,6 @@
45   45       import google.cloud.bigquery.table
46   46       from google.cloud.bigquery_storage_v1 import types as bq_storage_types
47   47       import pandas
48      -     import pyarrow as pa
49   48
50   49       import bigframes._tools
51   50       import bigframes._tools.strings
@@ -1307,22 +1306,6 @@ def _transform_read_gbq_configuration(configuration: Optional[dict]) -> dict:
1307 1306        return configuration
1308 1307
1309 1308
1310      - def _has_json_arrow_type(arrow_type: pa.DataType) -> bool:
1311      -     """
1312      -     Searches recursively for JSON array type within a PyArrow DataType.
1313      -     """
1314      -     if arrow_type == bigframes.dtypes.JSON_ARROW_TYPE:
1315      -         return True
1316      -     if pa.types.is_list(arrow_type):
1317      -         return _has_json_arrow_type(arrow_type.value_type)
1318      -     if pa.types.is_struct(arrow_type):
1319      -         for i in range(arrow_type.num_fields):
1320      -             if _has_json_arrow_type(arrow_type.field(i).type):
1321      -                 return True
1322      -         return False
1323      -     return False
1324      -
1325      -
1326 1309   def _validate_dtype_can_load(name: str, column_type: bigframes.dtypes.Dtype):
1327 1310       """
1328 1311       Determines whether a datatype is supported by bq load jobs.
@@ -1339,7 +1322,9 @@ def _validate_dtype_can_load(name: str, column_type: bigframes.dtypes.Dtype):
1339 1322       if column_type == bigframes.dtypes.JSON_DTYPE:
1340 1323           return
1341 1324
1342      -     if isinstance(column_type, pandas.ArrowDtype) and _has_json_arrow_type(
     1325 +     if isinstance(
     1326 +         column_type, pandas.ArrowDtype
     1327 +     ) and bigframes.dtypes.contains_db_dtypes_json_arrow_type(
1343 1328           column_type.pyarrow_dtype
1344 1329       ):
1345 1330           raise NotImplementedError(

0 commit comments

Comments (0)