diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/BaseHoodieWriteClient.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/BaseHoodieWriteClient.java
index 33aad54ee5af3..b3d49a61ad711 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/BaseHoodieWriteClient.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/BaseHoodieWriteClient.java
@@ -52,7 +52,6 @@
 import org.apache.hudi.common.util.CleanerUtils;
 import org.apache.hudi.common.util.CommitUtils;
 import org.apache.hudi.common.util.Option;
-import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.common.util.ValidationUtils;
 import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.config.HoodieArchivalConfig;
@@ -1327,25 +1326,10 @@ public final HoodieTable initTable(WriteOperationType operationType, Option
     try {
-      Option<HoodieInstant> lastInstant =
-          activeTimeline.filterCompletedInstants().filter(s -> s.getAction().equals(metaClient.getCommitActionType())
-              || s.getAction().equals(HoodieActiveTimeline.REPLACE_COMMIT_ACTION))
-          .lastInstant();
-      if (lastInstant.isPresent()) {
-        HoodieCommitMetadata commitMetadata = HoodieCommitMetadata.fromBytes(
-            activeTimeline.getInstantDetails(lastInstant.get()).get(), HoodieCommitMetadata.class);
-        String extraSchema = commitMetadata.getExtraMetadata().get(SCHEMA_KEY);
-        if (!StringUtils.isNullOrEmpty(extraSchema)) {
-          config.setSchema(commitMetadata.getExtraMetadata().get(SCHEMA_KEY));
-        } else {
-          throw new HoodieIOException("Latest commit does not have any schema in commit metadata");
-        }
-      } else {
-        LOG.warn("None rows are deleted because the table is empty");
-      }
-    } catch (IOException e) {
-      throw new HoodieIOException("IOException thrown while reading last commit metadata", e);
+      String schemaStr = new TableSchemaResolver(metaClient).getTableAvroSchema().toString();
+      config.setSchema(schemaStr);
+    } catch (Exception e) {
+      throw new HoodieException("Failed to get schema from MetadataClient", e);
     }
   }
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/dml/TestDeleteFromTable.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/dml/TestDeleteFromTable.scala
index b289ce74646c8..131c09f1089df 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/dml/TestDeleteFromTable.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/dml/TestDeleteFromTable.scala
@@ -42,6 +42,12 @@
          | LOCATION '${tmp.getCanonicalPath}/$tableName'
        """.stripMargin)
 
+      // Delete single row
+      spark.sql(s"DELETE FROM $tableName WHERE id = 1")
+
+      // Delete again
+      spark.sql(s"DELETE FROM $tableName WHERE id = 1")
+
       // NOTE: Do not write the field alias, the partition field must be placed last.
       spark.sql(
         s"""