From aef2c9c5d890b808384cfae906b4ea1f722659a0 Mon Sep 17 00:00:00 2001
From: Yann
Date: Thu, 2 Dec 2021 10:14:51 +0800
Subject: [PATCH] [MINOR] Use catalog schema if the table schema cannot be found

---
 .../apache/spark/sql/catalyst/catalog/HoodieCatalogTable.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/catalyst/catalog/HoodieCatalogTable.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/catalyst/catalog/HoodieCatalogTable.scala
index 78081220e1ae5..fdf1b062ed5fd 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/catalyst/catalog/HoodieCatalogTable.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/catalyst/catalog/HoodieCatalogTable.scala
@@ -117,7 +117,7 @@ class HoodieCatalogTable(val spark: SparkSession, val table: CatalogTable) exten
    * Make StructField nullable.
    */
   lazy val tableSchema: StructType = {
-    val originSchema = getTableSqlSchema(metaClient, includeMetadataFields = true).get
+    val originSchema = getTableSqlSchema(metaClient, includeMetadataFields = true).getOrElse(table.schema)
     StructType(originSchema.map(_.copy(nullable = true)))
   }
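
For reviewers, a minimal, self-contained sketch of the behavior this one-liner changes. It assumes getTableSqlSchema returns an Option[StructType] that can be empty (for example, when no table schema can be resolved from the Hudi meta client yet); the names below (SchemaFallbackSketch, resolvedSchemaFromMetaClient, catalogSchema) are illustrative stand-ins, not Hudi APIs. Previously, .get would throw NoSuchElementException in the empty case; with this patch, the schema recorded in the Spark catalog (table.schema) is used as a fallback instead.

import org.apache.spark.sql.types.{StringType, StructField, StructType}

object SchemaFallbackSketch extends App {
  // Stand-in for getTableSqlSchema(metaClient, includeMetadataFields = true):
  // empty when no table schema can be resolved.
  def resolvedSchemaFromMetaClient: Option[StructType] = None

  // Stand-in for table.schema, i.e. the schema stored in the Spark catalog.
  val catalogSchema: StructType = StructType(Seq(StructField("id", StringType)))

  // Before the patch: resolvedSchemaFromMetaClient.get would throw NoSuchElementException here.
  // After the patch: fall back to the catalog schema instead of failing.
  val originSchema: StructType = resolvedSchemaFromMetaClient.getOrElse(catalogSchema)

  // Same post-processing as in HoodieCatalogTable: make every field nullable.
  val tableSchema: StructType = StructType(originSchema.map(_.copy(nullable = true)))

  println(tableSchema.treeString)
}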