diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py
index 0c8c34dd87996..b0498d0298785 100644
--- a/python/pyspark/sql/functions.py
+++ b/python/pyspark/sql/functions.py
@@ -1431,9 +1431,11 @@ def to_utc_timestamp(timestamp, tz):
 def timestamp_seconds(col):
     """
     >>> from pyspark.sql.functions import timestamp_seconds
+    >>> spark.conf.set("spark.sql.session.timeZone", "America/Los_Angeles")
     >>> time_df = spark.createDataFrame([(1230219000,)], ['unix_time'])
     >>> time_df.select(timestamp_seconds(time_df.unix_time).alias('ts')).collect()
     [Row(ts=datetime.datetime(2008, 12, 25, 7, 30))]
+    >>> spark.conf.unset("spark.sql.session.timeZone")
     """
     sc = SparkContext._active_spark_context
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
index 62ad5ea9b5935..239b705a473d0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -3360,8 +3360,8 @@ object functions {
   /**
    * Creates timestamp from the number of seconds since UTC epoch.
-   * @group = datetime_funcs
-   * @since = 3.1.0
+   * @group datetime_funcs
+   * @since 3.1.0
    */
   def timestamp_seconds(e: Column): Column = withExpr {
     SecondsToTimestamp(e.expr)