[SPARK-26246][SQL] Inferring TimestampType from JSON #23201
Closed

Changes from 7 commits (18 commits in total, all by MaxGekk):
- 2a26e2c Added a test for timestamp inferring
- bd47207 Infer date and timestamp types
- 9dbdf0a Test for date type
- 9376832 Added a test to check that inferring of the date type is prioritised …
- 05bbfea Infer date type before timestamp type
- 53778f9 Merge remote-tracking branch 'origin/master' into json-infer-time
- 63ebf42 Fix merges
- f92ff86 Merge remote-tracking branch 'origin/master' into json-infer-time
- e6fc432 Inferring timestamp only
- b27d081 Test for inferring timestamps and decimals
- c59e3e8 `type` -> dt
- e7471a7 GMT -> UTC
- 82816ed Test for fallback to string type
- b0d1374 Fix task is not serializable
- 5782de5 Added test for schema inferring
- 63a6568 Roundtrip test for timestamp inferring
- e67a2a1 Merge branch 'json-infer-time' of github.com:MaxGekk/spark-1 into jso…
- 11daee3 Testing for legacy and new timestamp parser
83 changes: 83 additions & 0 deletions
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/json/JsonInferSchemaSuite.scala (new file)

```scala
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.json

import com.fasterxml.jackson.core.JsonFactory

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types._

class JsonInferSchemaSuite extends SparkFunSuite {

  def checkType(options: Map[String, String], json: String, `type`: DataType): Unit = {
    val jsonOptions = new JSONOptions(options, "GMT", "")
    val inferSchema = new JsonInferSchema(jsonOptions)
    val factory = new JsonFactory()
    jsonOptions.setJacksonOptions(factory)
    val parser = CreateJacksonParser.string(factory, json)
    parser.nextToken()
    val expectedType = StructType(Seq(StructField("a", `type`, true)))

    assert(inferSchema.inferField(parser) === expectedType)
  }

  def checkTimestampType(pattern: String, json: String): Unit = {
    checkType(Map("timestampFormat" -> pattern), json, TimestampType)
  }

  test("inferring timestamp type") {
    checkTimestampType("yyyy", """{"a": "2018"}""")
    checkTimestampType("yyyy=MM", """{"a": "2018=12"}""")
    checkTimestampType("yyyy MM dd", """{"a": "2018 12 02"}""")
    checkTimestampType(
      "yyyy-MM-dd'T'HH:mm:ss.SSS",
      """{"a": "2018-12-02T21:04:00.123"}""")
    checkTimestampType(
      "yyyy-MM-dd'T'HH:mm:ss.SSSSSSXXX",
      """{"a": "2018-12-02T21:04:00.123567+01:00"}""")
  }

  def checkDateType(pattern: String, json: String): Unit = {
    checkType(Map("dateFormat" -> pattern), json, DateType)
  }

  test("inferring date type") {
    checkDateType("yyyy", """{"a": "2018"}""")
    checkDateType("yyyy-MM", """{"a": "2018-12"}""")
    checkDateType("yyyy-MM-dd", """{"a": "2018-12-02"}""")
  }

  test("strict inferring of date and timestamps") {
    checkType(
      options = Map(
        "dateFormat" -> "yyyy-MM-dd",
        "timestampFormat" -> "yyyy-MM-dd'T'HH:mm:ss.SSS"
      ),
      json = """{"a": "2018-12-02T21:04:00.123"}""",
      `type` = TimestampType
    )
    checkType(
      options = Map(
        "dateFormat" -> "yyyy-MM-dd",
        "timestampFormat" -> "yyyy-MM-dd'T'HH:mm:ss.SSS"
      ),
      json = """{"a": "2018-12-02"}""",
      `type` = DateType
    )
  }
}
```
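For context, here is a minimal usage sketch of how the inference tested above would surface through the DataFrame reader. The session setup and the expected output are assumptions based on this PR's behaviour, not code taken from the PR:

```scala
// Hedged sketch: assumes the timestamp inference behaviour added by this PR.
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .appName("json-timestamp-inference-sketch")
  .master("local[*]")
  .getOrCreate()
import spark.implicits._

// A JSON field whose values match the configured timestampFormat should be
// inferred as TimestampType rather than StringType.
val ds = Seq("""{"a": "2018-12-02T21:04:00.123"}""").toDS()
val df = spark.read
  .option("timestampFormat", "yyyy-MM-dd'T'HH:mm:ss.SSS")
  .json(ds)

df.printSchema()
// expected (with this PR): root
//  |-- a: timestamp (nullable = true)
```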
Conversations
shall we abstract out this logic for all the text sources?
Yes, we can do that. There is some common code that could be shared. Can we do it in a separate PR?
sure. How many text data sources already support it?
DateType is not inferred at all, but there is other type inference code that could be shared between JSON and CSV (and maybe elsewhere).
I checked PartitioningUtils.inferPartitionColumnValue: we try timestamp first and then date. Shall we follow it?
do you mean partition value type inference will have a different result than json value type inference?
I didn't mean type inference in partition values, but you are probably right: we should follow the same logic for schema inference in data sources and for partition value types.
Just wondering how it works for now; see this code:
- spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala, lines 474 to 482 in 5a140b7
- spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/CSVInferSchema.scala, line 163 in f982ca0
Maybe inferPartitionColumnValue should ask the data source to infer date/timestamp types?
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The partition feature is shared between all the file-based sources; I think it's overkill to make it differ across data sources.
The simplest solution to me is asking all text sources to follow the behavior of partition value type inference.
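To make the ordering under discussion concrete, here is an illustrative sketch of value type inference that tries the timestamp pattern first and then the date pattern, falling back to string. The object and its helpers are hypothetical, not the PartitioningUtils or JsonInferSchema implementation:

```scala
// Hypothetical helper for illustration only; not Spark's implementation.
import java.time.{LocalDate, LocalDateTime}
import java.time.format.DateTimeFormatter
import scala.util.Try

import org.apache.spark.sql.types.{DataType, DateType, StringType, TimestampType}

object TemporalTypeInference {
  // `parseTimestamp` and `parseDate` stand in for whatever parser the source uses;
  // only the ordering matters here: timestamp first, then date, then string.
  def infer(
      value: String,
      parseTimestamp: String => Any,
      parseDate: String => Any): DataType = {
    if (Try(parseTimestamp(value)).isSuccess) TimestampType
    else if (Try(parseDate(value)).isSuccess) DateType
    else StringType
  }
}

// Example with strict java.time parsers: a date-only string falls through the
// timestamp pattern and is inferred as DateType.
val tsFmt = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS")
val dateFmt = DateTimeFormatter.ofPattern("yyyy-MM-dd")
val inferred = TemporalTypeInference.infer(
  "2018-12-02",
  s => LocalDateTime.parse(s, tsFmt),
  s => LocalDate.parse(s, dateFmt))
assert(inferred == DateType)
```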
Yeah, I tried to match it with CSV a long time ago but kind of gave up due to behaviour changes, IIRC. If that's possible, it would be awesome.
If that's difficult, matching the behaviour within the text-based data sources (meaning CSV and JSON, I guess) should be good enough.
@cloud-fan, that works only if we use the default date/timestamp patterns. Both would need an exact match against the pattern, which unfortunately the current parsing library (SimpleDateFormat) does not allow.
The order here is just to make it look better; neither should depend on the order. I think we should support those inferences after completely switching the library to java.time.format.* (which does an exact match, and exists in JDK 8) without a legacy fallback. That should make this change easier, without a hole.
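To illustrate the exact-match point, a small comparison (illustration only, not Spark code): SimpleDateFormat stops parsing once the pattern is satisfied and ignores trailing text, while java.time rejects input that the pattern does not fully consume:

```scala
// Illustration only: why a date-only pattern cannot be matched exactly with the
// legacy parser but can with java.time.
import java.text.SimpleDateFormat
import java.time.LocalDate
import java.time.format.DateTimeFormatter
import scala.util.Try

val value = "2018-12-02T21:04:00.123"  // clearly a timestamp, not a plain date

// Legacy parser: parses the "2018-12-02" prefix and silently ignores the rest,
// so the timestamp string would be mis-inferred as a date.
val legacy = new SimpleDateFormat("yyyy-MM-dd")
legacy.setLenient(false)
println(Try(legacy.parse(value)).isSuccess)  // true

// java.time: LocalDate.parse requires the whole input to match the pattern, so
// the same string is rejected here and can be tried against the timestamp pattern.
val strict = DateTimeFormatter.ofPattern("yyyy-MM-dd")
println(Try(LocalDate.parse(value, strict)).isSuccess)  // false
```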