Improve pre-validation for Flint index refresh options #297
Merged: dai-chen merged 12 commits into opensearch-project:main from dai-chen:improve-index-validation on Apr 17, 2024.
Commits (12)
- 1328586 Add index refresh validation (dai-chen)
- 5b1a49a Add Java doc (dai-chen)
- 3380401 Implement index refresh options validation (dai-chen)
- 047f39e Merge branch 'main' into improve-index-validation (dai-chen)
- 5ae67c5 Move validate to index builder and add separate suite for Hive test (dai-chen)
- 3d6e306 Use in-memory Derby as Hive metastore (dai-chen)
- 2874174 Fix broken IT (dai-chen)
- 8320214 Add more IT (dai-chen)
- 2c2e129 Rename Hive test base suite (dai-chen)
- 2c47c16 Polish Javadoc and comments (dai-chen)
- 7731309 Address PR comments (dai-chen)
- 196ee3e Merge branch 'main' into improve-index-validation (dai-chen)
86 changes: 86 additions & 0 deletions
86
...rk-integration/src/main/scala/org/opensearch/flint/spark/FlintSparkValidationHelper.scala
```scala
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.flint.spark

import java.io.IOException

import org.apache.hadoop.fs.Path
import org.opensearch.flint.spark.covering.FlintSparkCoveringIndex
import org.opensearch.flint.spark.mv.FlintSparkMaterializedView
import org.opensearch.flint.spark.skipping.FlintSparkSkippingIndex

import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.execution.command.DDLUtils
import org.apache.spark.sql.execution.streaming.CheckpointFileManager
import org.apache.spark.sql.flint.{loadTable, parseTableName, qualifyTableName}

/**
 * Flint Spark validation helper.
 */
trait FlintSparkValidationHelper extends Logging {

  /**
   * Determines whether the source table(s) for a given Flint index are supported.
   *
   * @param spark
   *   Spark session
   * @param index
   *   Flint index
   * @return
   *   true if any source table is a Hive table (currently unsupported), otherwise false
   */
  def isTableProviderSupported(spark: SparkSession, index: FlintSparkIndex): Boolean = {
    // Extract source table name (possibly more than one for MV query)
    val tableNames = index match {
      case skipping: FlintSparkSkippingIndex => Seq(skipping.tableName)
      case covering: FlintSparkCoveringIndex => Seq(covering.tableName)
      case mv: FlintSparkMaterializedView =>
        spark.sessionState.sqlParser
          .parsePlan(mv.query)
          .collect { case relation: UnresolvedRelation =>
            qualifyTableName(spark, relation.tableName)
          }
    }

    // Validate if any source table is not supported (currently Hive only)
    tableNames.exists { tableName =>
      val (catalog, ident) = parseTableName(spark, tableName)
      val table = loadTable(catalog, ident).get

      // TODO: add allowed table provider list
      DDLUtils.isHiveTable(Option(table.properties().get("provider")))
    }
  }

  /**
   * Checks whether a specified checkpoint location is accessible. Accessibility, in this
   * context, means that the folder exists and the current Spark session has the necessary
   * permissions to access it.
   *
   * @param spark
   *   Spark session
   * @param checkpointLocation
   *   checkpoint location
   * @return
   *   true if accessible, otherwise false
   */
  def isCheckpointLocationAccessible(spark: SparkSession, checkpointLocation: String): Boolean = {
    try {
      val checkpointManager =
        CheckpointFileManager.create(
          new Path(checkpointLocation),
          spark.sessionState.newHadoopConf())

      checkpointManager.exists(new Path(checkpointLocation))
    } catch {
      case e: IOException =>
        logWarning(s"Failed to check if checkpoint location $checkpointLocation exists", e)
        false
    }
  }
}
```
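Both checks are meant to run as pre-validation before an index refresh starts. The sketch below shows one hypothetical way a caller could wire them together; `IndexRefreshPreValidator` and its `validate` signature are illustrative and not part of this PR. Note that `isTableProviderSupported`, despite its name, returns true when an unsupported (Hive) source table is found, so the caller negates the result.

```scala
// Hypothetical usage sketch (not from this PR): wiring the two helper checks
// into a pre-validation step that runs before an index refresh starts.
import org.apache.spark.sql.SparkSession
import org.opensearch.flint.spark.{FlintSparkIndex, FlintSparkValidationHelper}

object IndexRefreshPreValidator extends FlintSparkValidationHelper {

  // `checkpointLocation` stands in for the checkpoint location refresh option
  def validate(
      spark: SparkSession,
      index: FlintSparkIndex,
      checkpointLocation: Option[String]): Unit = {
    // isTableProviderSupported returns true when a Hive (unsupported) source table is found
    require(
      !isTableProviderSupported(spark, index),
      "Index refresh doesn't support Hive table")

    // Fail fast if the configured checkpoint folder is missing or inaccessible
    checkpointLocation.foreach { location =>
      require(
        isCheckpointLocationAccessible(spark, location),
        s"Checkpoint location $location doesn't exist or is inaccessible")
    }
  }
}
```

Failing fast here surfaces misconfiguration at index creation time instead of letting the streaming refresh job die later with a less actionable error.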
44 changes: 44 additions & 0 deletions
44
integ-test/src/test/scala/org/apache/spark/sql/SparkHiveSupportSuite.scala
```scala
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.apache.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.hive.HiveSessionStateBuilder
import org.apache.spark.sql.internal.{SessionState, StaticSQLConf}
import org.apache.spark.sql.test.{SharedSparkSession, TestSparkSession}

/**
 * Flint Spark base suite with Hive support enabled. Enabling Hive support in the Spark
 * configuration alone is not adequate, because [[TestSparkSession]] disregards it and always
 * creates its own instance of [[org.apache.spark.sql.test.TestSQLSessionStateBuilder]]. We
 * therefore override its session state with the Hive one.
 *
 * Note that we need to extend [[SharedSparkSession]] to call the super.sparkConf method.
 */
trait SparkHiveSupportSuite extends SharedSparkSession {

  override protected def sparkConf: SparkConf = {
    super.sparkConf
      // Enable Hive support
      .set(StaticSQLConf.CATALOG_IMPLEMENTATION.key, "hive")
      // Use in-memory Derby as the Hive metastore so no metastore_db folder needs cleanup after tests
      .set("javax.jdo.option.ConnectionURL", "jdbc:derby:memory:metastore_db;create=true")
      .set("hive.metastore.uris", "")
  }

  override protected def createSparkSession: TestSparkSession = {
    SparkSession.cleanupAnyExistingSession()
    new FlintTestSparkSession(sparkConf)
  }

  class FlintTestSparkSession(sparkConf: SparkConf) extends TestSparkSession(sparkConf) { self =>

    override lazy val sessionState: SessionState = {
      // Override to replace [[TestSQLSessionStateBuilder]] with Hive session state
      new HiveSessionStateBuilder(spark, None).build()
    }
  }
}
```
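A concrete integration test only needs to mix the trait in; the in-memory Derby metastore configured in `sparkConf` above is then used transparently. The suite below is a hypothetical sketch, not part of this PR, verifying that tables created in this environment carry the `hive` provider that the new validation rejects.

```scala
// Hypothetical usage sketch (not from this PR): a suite mixing in
// SparkHiveSupportSuite so its tests run against a Hive-enabled session.
package org.apache.spark.sql

import org.apache.spark.sql.catalyst.TableIdentifier

class HiveProviderSuite extends SparkHiveSupportSuite {

  test("table created under Hive catalog reports the hive provider") {
    withTable("hive_test") {
      // With CATALOG_IMPLEMENTATION=hive, plain CREATE TABLE yields a Hive-provider table
      sql("CREATE TABLE hive_test (name STRING, age INT) STORED AS PARQUET")

      val metadata =
        spark.sessionState.catalog.getTableMetadata(TableIdentifier("hive_test"))
      assert(metadata.provider == Some("hive"))
    }
  }
}
```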