Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ private Stream<WriteStatus> runClusteringForGroup(ClusteringGroupInfo clustering
/**
* Execute clustering to write inputRecords into new files as defined by rules in strategy parameters.
* The number of new file groups created is bounded by numOutputGroups.
* Note that commit is not done as part of strategy. commit is callers responsibility.
* Note that commit is not done as part of strategy. Commit is the caller's responsibility.
*/
public abstract Iterator<List<WriteStatus>> performClusteringWithRecordsIterator(final Iterator<HoodieRecord<T>> records, final int numOutputGroups,
final String instantTime,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ private SparkInternalSchemaConverter() {
public static final String HOODIE_VALID_COMMITS_LIST = "hoodie.valid.commits.list";

/**
* Converts a spark schema to an hudi internal schema. Fields without IDs are kept and assigned fallback IDs.
* Convert a Spark schema to a Hudi internal schema. Fields without IDs are kept and assigned fallback IDs.
*
* @param sparkSchema a spark schema
* @return a matching internal schema for the provided spark schema
Expand Down Expand Up @@ -157,7 +157,7 @@ public static Type buildTypeFromStructType(DataType sparkType, Boolean firstVisi
}

/**
* Converts Spark schema to Hudi internal schema, and prune fields.
* Convert a Spark schema to a Hudi internal schema, and prune fields.
* Fields without IDs are kept and assigned fallback IDs.
*
* @param sparkSchema a pruned spark schema
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@
import scala.collection.JavaConverters;

/**
* Spark validator utils to verify and run any precommit validators configured.
* Spark validator utils to verify and run any pre-commit validators configured.
*/
public class SparkValidatorUtils {
private static final Logger LOG = LogManager.getLogger(BaseSparkCommitActionExecutor.class);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,7 @@ public byte[] getBytes(Schema schema) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream output = new DataOutputStream(baos);

// 2. Compress and Write schema out
// 1. Compress and Write schema out
byte[] schemaContent = compress(schema.toString());
output.writeInt(schemaContent.length);
output.write(schemaContent);
Expand All @@ -318,10 +318,10 @@ public byte[] getBytes(Schema schema) throws IOException {
recordItr.forEachRemaining(records::add);
}

// 3. Write total number of records
// 2. Write total number of records
output.writeInt(records.size());

// 4. Write the records
// 3. Write the records
Iterator<IndexedRecord> itr = records.iterator();
while (itr.hasNext()) {
IndexedRecord s = itr.next();
Expand Down