Skip to content
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
56 changes: 0 additions & 56 deletions python/pyspark/sql/tests/test_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -995,62 +995,6 @@ def test_make_timestamp_ntz(self):
with self.assertRaises(Exception):
F.make_timestamp_ntz(date=df_dt.date)

# Test 17: Invalid data types - should raise exception for invalid string to int cast
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

How about changing it to

with self.sql_conf({"spark.sql.ansi.enabled": True}):
            # Test 17: Invalid data types - should raise exception for invalid string to int cast
            with self.assertRaises(Exception):
                self.spark.range(1).select(
                    F.make_timestamp_ntz(
                        F.lit("invalid"), F.lit(5), F.lit(22), F.lit(10), F.lit(30), F.lit(0)
                    )
                ).collect()
....

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I feel we don't need to check this from PySpark's side; it is already guaranteed by Spark SQL.

with self.assertRaises(Exception):
self.spark.range(1).select(
F.make_timestamp_ntz(
F.lit("invalid"), F.lit(5), F.lit(22), F.lit(10), F.lit(30), F.lit(0)
)
).collect()

# Test 18: Out of range values (month=13) - should raise exception for invalid date
df_invalid = self.spark.createDataFrame(
[(2024, 13, 22, 10, 30, 0)], ["year", "month", "day", "hour", "minute", "second"]
)
with self.assertRaises(Exception):
df_invalid.select(
F.make_timestamp_ntz(
df_invalid.year,
df_invalid.month,
df_invalid.day,
df_invalid.hour,
df_invalid.minute,
df_invalid.second,
)
).collect()

# Test 19: Out of range values (hour=25) - should raise exception for invalid time
df_invalid_hour = self.spark.createDataFrame(
[(2024, 5, 22, 25, 30, 0)], ["year", "month", "day", "hour", "minute", "second"]
)
with self.assertRaises(Exception):
df_invalid_hour.select(
F.make_timestamp_ntz(
df_invalid_hour.year,
df_invalid_hour.month,
df_invalid_hour.day,
df_invalid_hour.hour,
df_invalid_hour.minute,
df_invalid_hour.second,
)
).collect()

# Test 20: February 29 in non-leap year
df_non_leap = self.spark.createDataFrame(
[(2023, 2, 29, 0, 0, 0)], ["year", "month", "day", "hour", "minute", "second"]
)
with self.assertRaises(Exception): # Should raise runtime exception for invalid date
df_non_leap.select(
F.make_timestamp_ntz(
df_non_leap.year,
df_non_leap.month,
df_non_leap.day,
df_non_leap.hour,
df_non_leap.minute,
df_non_leap.second,
)
).collect()

def test_make_date(self):
# SPARK-36554: expose make_date expression
df = self.spark.createDataFrame([(2020, 6, 26)], ["Y", "M", "D"])
Expand Down