Skip to content

Commit 7ef6887

Browse files
committed
Remove underscore from check methods.
1 parent 677f6bc commit 7ef6887

File tree

6 files changed

+18
-18
lines changed

6 files changed

+18
-18
lines changed

python/pyspark/sql/dataframe.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1906,9 +1906,9 @@ def toPandas(self):
19061906
if self.sql_ctx.getConf("spark.sql.execution.arrow.enabled", "false").lower() == "true":
19071907
try:
19081908
from pyspark.sql.types import _check_dataframe_localize_timestamps
1909-
from pyspark.sql.utils import _require_minimum_pyarrow_version
1909+
from pyspark.sql.utils import require_minimum_pyarrow_version
19101910
import pyarrow
1911-
_require_minimum_pyarrow_version()
1911+
require_minimum_pyarrow_version()
19121912
tables = self._collectAsArrow()
19131913
if tables:
19141914
table = pyarrow.concat_tables(tables)

python/pyspark/sql/session.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -494,11 +494,11 @@ def _create_from_pandas_with_arrow(self, pdf, schema, timezone):
494494
"""
495495
from pyspark.serializers import ArrowSerializer, _create_batch
496496
from pyspark.sql.types import from_arrow_schema, to_arrow_type, TimestampType
497-
from pyspark.sql.utils import _require_minimum_pandas_version, \
498-
_require_minimum_pyarrow_version
497+
from pyspark.sql.utils import require_minimum_pandas_version, \
498+
require_minimum_pyarrow_version
499499

500-
_require_minimum_pandas_version()
501-
_require_minimum_pyarrow_version()
500+
require_minimum_pandas_version()
501+
require_minimum_pyarrow_version()
502502

503503
from pandas.api.types import is_datetime64_dtype, is_datetime64tz_dtype
504504

python/pyspark/sql/tests.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -53,8 +53,8 @@
5353
try:
5454
import pandas
5555
try:
56-
from pyspark.sql.utils import _require_minimum_pandas_version
57-
_require_minimum_pandas_version()
56+
from pyspark.sql.utils import require_minimum_pandas_version
57+
require_minimum_pandas_version()
5858
_have_pandas = True
5959
except:
6060
_have_old_pandas = True

python/pyspark/sql/types.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1686,8 +1686,8 @@ def _check_dataframe_localize_timestamps(pdf, timezone):
16861686
:param timezone: the timezone to convert. if None then use local timezone
16871687
:return pandas.DataFrame where any timezone aware columns have been converted to tz-naive
16881688
"""
1689-
from pyspark.sql.utils import _require_minimum_pandas_version
1690-
_require_minimum_pandas_version()
1689+
from pyspark.sql.utils import require_minimum_pandas_version
1690+
require_minimum_pandas_version()
16911691

16921692
from pandas.api.types import is_datetime64tz_dtype
16931693
tz = timezone or 'tzlocal()'
@@ -1707,8 +1707,8 @@ def _check_series_convert_timestamps_internal(s, timezone):
17071707
:param timezone: the timezone to convert. if None then use local timezone
17081708
:return pandas.Series where if it is a timestamp, has been UTC normalized without a time zone
17091709
"""
1710-
from pyspark.sql.utils import _require_minimum_pandas_version
1711-
_require_minimum_pandas_version()
1710+
from pyspark.sql.utils import require_minimum_pandas_version
1711+
require_minimum_pandas_version()
17121712

17131713
from pandas.api.types import is_datetime64_dtype, is_datetime64tz_dtype
17141714
# TODO: handle nested timestamps, such as ArrayType(TimestampType())?
@@ -1730,8 +1730,8 @@ def _check_series_convert_timestamps_localize(s, from_timezone, to_timezone):
17301730
:param to_timezone: the timezone to convert to. if None then use local timezone
17311731
:return pandas.Series where if it is a timestamp, has been converted to tz-naive
17321732
"""
1733-
from pyspark.sql.utils import _require_minimum_pandas_version
1734-
_require_minimum_pandas_version()
1733+
from pyspark.sql.utils import require_minimum_pandas_version
1734+
require_minimum_pandas_version()
17351735

17361736
import pandas as pd
17371737
from pandas.api.types import is_datetime64tz_dtype, is_datetime64_dtype

python/pyspark/sql/udf.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,9 @@ def _create_udf(f, returnType, evalType):
3737
if evalType == PythonEvalType.SQL_PANDAS_SCALAR_UDF or \
3838
evalType == PythonEvalType.SQL_PANDAS_GROUP_MAP_UDF:
3939
import inspect
40-
from pyspark.sql.utils import _require_minimum_pyarrow_version
40+
from pyspark.sql.utils import require_minimum_pyarrow_version
4141

42-
_require_minimum_pyarrow_version()
42+
require_minimum_pyarrow_version()
4343
argspec = inspect.getargspec(f)
4444

4545
if evalType == PythonEvalType.SQL_PANDAS_SCALAR_UDF and len(argspec.args) == 0 and \

python/pyspark/sql/utils.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ def toJArray(gateway, jtype, arr):
112112
return jarr
113113

114114

115-
def _require_minimum_pandas_version():
115+
def require_minimum_pandas_version():
116116
""" Raise ImportError if minimum version of Pandas is not installed
117117
"""
118118
from distutils.version import LooseVersion
@@ -121,7 +121,7 @@ def _require_minimum_pandas_version():
121121
raise ImportError("Pandas >= 0.19.2 must be installed on calling Python process")
122122

123123

124-
def _require_minimum_pyarrow_version():
124+
def require_minimum_pyarrow_version():
125125
""" Raise ImportError if minimum version of pyarrow is not installed
126126
"""
127127
from distutils.version import LooseVersion

0 commit comments

Comments (0)