Skip to content
72 changes: 58 additions & 14 deletions api/src/main/java/org/apache/iceberg/PartitionSpec.java
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@
*/
public class PartitionSpec implements Serializable {
// IDs for partition fields start at 1000
private static final int PARTITION_DATA_ID_START = 1000;
static final int PARTITION_DATA_ID_START = 1000;

private final Schema schema;

Expand Down Expand Up @@ -174,7 +174,7 @@ public String partitionToPath(StructLike data) {
}

/**
* Returns true if this spec is equivalent to the other, with field names and partition field ids ignored.
* Returns true if this spec is compatible with the other, with field names and partition field ids ignored.
* That is, if both specs have the same number of fields, field order, source columns, and transforms.
*
* @param other another PartitionSpec
Expand All @@ -201,6 +201,21 @@ public boolean compatibleWith(PartitionSpec other) {
return true;
}

/**
 * Returns true if this spec is equivalent to the other.
 * That is, if both specs have the same number of fields, field order, and partition fields.
 *
 * @param other another PartitionSpec
 * @return true if the specs have the same partition fields.
 */
boolean equivalentTo(PartitionSpec other) {
  // full equality (which also compares spec ids) implies equivalence; otherwise
  // compare only the partition fields themselves
  return equals(other) || Arrays.equals(fields, other.fields);
}


@Override
public boolean equals(Object other) {
if (this == other) {
Expand Down Expand Up @@ -311,7 +326,7 @@ public static class Builder {
private final Schema schema;
private final List<PartitionField> fields = Lists.newArrayList();
private final Set<String> partitionNames = Sets.newHashSet();
private Map<Integer, PartitionField> timeFields = Maps.newHashMap();
private Map<String, PartitionField> partitionFields = Maps.newHashMap();
private int specId = 0;
private final AtomicInteger lastAssignedFieldId = new AtomicInteger(PARTITION_DATA_ID_START - 1);

Expand Down Expand Up @@ -346,11 +361,29 @@ private void checkAndAddPartitionName(String name, Integer identitySourceColumnI
partitionNames.add(name);
}

private String getDedupKey(PartitionField field) {
String transformName = field.transform().getName();
if (transformName != null) {
return transformName + "(" + field.sourceId() + ")";
} else {
return null;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What about using the transform name in this situation? Then this would always catch duplicate transforms.

Also, should we add a case for truncate with different lengths?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yep, that is better. I add a getName() method to Transform interface.

Similar to bucket, we may only allow one truncate transform for a given field.
Do you know if there is a valid use case with multiple truncate for a field?

}
}

/**
 * Ensures the new field does not duplicate an already-added partition field with the same
 * transform on the same source column (e.g. two bucket transforms over one column).
 *
 * @param field the partition field about to be added
 * @throws IllegalArgumentException if an equivalent partition field was already added
 */
private void checkForRedundantPartitions(PartitionField field) {
  String dedupKey = getDedupKey(field);
  if (dedupKey == null) {
    // the transform has no name, so there is nothing to de-duplicate on
    return;
  }
  PartitionField partitionField = partitionFields.get(dedupKey);
  Preconditions.checkArgument(partitionField == null,
      "Cannot add redundant partition: %s conflicts with %s", partitionField, field);
  partitionFields.put(dedupKey, field);
}

/**
 * Validates that no existing partition field already uses the given field id.
 *
 * @param fieldId the candidate partition field id
 * @throws IllegalArgumentException if the id is already used by another field
 */
private void checkDuplicateFieldId(int fieldId) {
  boolean duplicate = fields.stream().anyMatch(f -> f.fieldId() == fieldId);
  Preconditions.checkArgument(!duplicate,
      "Cannot add a partition that duplicates another within %s.", fields);
}

public Builder withSpecId(int newSpecId) {
Expand All @@ -377,11 +410,11 @@ public Builder identity(String sourceName) {
}

/**
 * Adds a partition field that applies a year transform to the named source column.
 *
 * @param sourceName the name of the source column in the table schema
 * @param targetName the name of this partition field
 * @return this for method chaining
 */
public Builder year(String sourceName, String targetName) {
  Types.NestedField sourceColumn = findSourceColumn(sourceName);
  PartitionField field = new PartitionField(
      sourceColumn.fieldId(), nextFieldId(), targetName, Transforms.year(sourceColumn.type()));
  // check redundancy first so a duplicate transform fails before a duplicate name does
  checkForRedundantPartitions(field);
  checkAndAddPartitionName(targetName);
  fields.add(field);
  return this;
}
Expand All @@ -391,11 +424,11 @@ public Builder year(String sourceName) {
}

/**
 * Adds a partition field that applies a month transform to the named source column.
 *
 * @param sourceName the name of the source column in the table schema
 * @param targetName the name of this partition field
 * @return this for method chaining
 */
public Builder month(String sourceName, String targetName) {
  Types.NestedField sourceColumn = findSourceColumn(sourceName);
  PartitionField field = new PartitionField(
      sourceColumn.fieldId(), nextFieldId(), targetName, Transforms.month(sourceColumn.type()));
  // check redundancy first so a duplicate transform fails before a duplicate name does
  checkForRedundantPartitions(field);
  checkAndAddPartitionName(targetName);
  fields.add(field);
  return this;
}
Expand All @@ -405,11 +438,11 @@ public Builder month(String sourceName) {
}

public Builder day(String sourceName, String targetName) {
checkAndAddPartitionName(targetName);
Types.NestedField sourceColumn = findSourceColumn(sourceName);
PartitionField field = new PartitionField(
sourceColumn.fieldId(), nextFieldId(), targetName, Transforms.day(sourceColumn.type()));
checkForRedundantPartitions(field);
checkAndAddPartitionName(targetName);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this was needed because of my comment that the duplicate field should throw an exception before the duplicate name.

fields.add(field);
return this;
}
Expand All @@ -419,11 +452,11 @@ public Builder day(String sourceName) {
}

/**
 * Adds a partition field that applies an hour transform to the named source column.
 *
 * @param sourceName the name of the source column in the table schema
 * @param targetName the name of this partition field
 * @return this for method chaining
 */
public Builder hour(String sourceName, String targetName) {
  Types.NestedField sourceColumn = findSourceColumn(sourceName);
  PartitionField field = new PartitionField(
      sourceColumn.fieldId(), nextFieldId(), targetName, Transforms.hour(sourceColumn.type()));
  // check redundancy first so a duplicate transform fails before a duplicate name does
  checkForRedundantPartitions(field);
  checkAndAddPartitionName(targetName);
  fields.add(field);
  return this;
}
Expand All @@ -433,10 +466,12 @@ public Builder hour(String sourceName) {
}

/**
 * Adds a partition field that applies a bucket transform to the named source column.
 *
 * @param sourceName the name of the source column in the table schema
 * @param numBuckets the number of buckets
 * @param targetName the name of this partition field
 * @return this for method chaining
 */
public Builder bucket(String sourceName, int numBuckets, String targetName) {
  Types.NestedField sourceColumn = findSourceColumn(sourceName);
  PartitionField field = new PartitionField(
      sourceColumn.fieldId(), nextFieldId(), targetName, Transforms.bucket(sourceColumn.type(), numBuckets));
  // check redundancy first so a duplicate transform fails before a duplicate name does
  checkForRedundantPartitions(field);
  checkAndAddPartitionName(targetName);
  fields.add(field);
  return this;
}

Expand Down Expand Up @@ -476,11 +511,20 @@ Builder add(int sourceId, int fieldId, String name, String transform) {
Types.NestedField column = schema.findField(sourceId);
checkAndAddPartitionName(name, column.fieldId());
Preconditions.checkNotNull(column, "Cannot find source column: %s", sourceId);
checkDuplicateFieldId(fieldId);
fields.add(new PartitionField(sourceId, fieldId, name, Transforms.fromString(column.type(), transform)));
lastAssignedFieldId.getAndAccumulate(fieldId, Math::max);
return this;
}

/**
 * Adds every field from the given iterable, checking each for redundancy before adding.
 *
 * @param fieldsToAdd partition fields to copy into this builder
 * @return this for method chaining
 */
Builder addAll(Iterable<PartitionField> fieldsToAdd) {
  for (PartitionField field : fieldsToAdd) {
    checkForRedundantPartitions(field);
    add(field.sourceId(), field.fieldId(), field.name(), field.transform().toString());
  }
  return this;
}

public PartitionSpec build() {
PartitionSpec spec = new PartitionSpec(schema, specId, fields, lastAssignedFieldId.get());
checkCompatibility(spec, schema);
Expand Down
7 changes: 7 additions & 0 deletions api/src/main/java/org/apache/iceberg/Table.java
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,13 @@ public interface Table {
*/
UpdateSchema updateSchema();

/**
 * Create a new {@link UpdatePartitionSpec} to alter the partition spec of this table and commit the change.
 *
 * @return a new {@link UpdatePartitionSpec} for this table
 */
UpdatePartitionSpec updateSpec();

/**
* Create a new {@link UpdateProperties} to update table properties and commit the changes.
*
Expand Down
7 changes: 7 additions & 0 deletions api/src/main/java/org/apache/iceberg/Transaction.java
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,13 @@ public interface Transaction {
*/
UpdateSchema updateSchema();

/**
 * Create a new {@link UpdatePartitionSpec} to alter the partition spec of this table.
 *
 * @return a new {@link UpdatePartitionSpec} for this transaction's table
 */
UpdatePartitionSpec updateSpec();

/**
* Create a new {@link UpdateProperties} to update table properties.
*
Expand Down
175 changes: 175 additions & 0 deletions api/src/main/java/org/apache/iceberg/UpdatePartitionSpec.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg;

import org.apache.iceberg.exceptions.CommitFailedException;

/**
 * API for partition spec evolution.
 * <p>
 * When committing, these changes will be applied to the current table metadata. Commit conflicts
 * will not be resolved and will result in a {@link CommitFailedException}.
 */
public interface UpdatePartitionSpec extends PendingUpdate<PartitionSpec> {

  /**
   * Add a new partition field with identity transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @param targetName the name of this partition field
   * @return this for method chaining
   */
  UpdatePartitionSpec addIdentityField(String sourceName, String targetName);

  /**
   * Add a new partition field with identity transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @return this for method chaining
   */
  UpdatePartitionSpec addIdentityField(String sourceName);

  /**
   * Add a new partition field with year transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @param targetName the name of this partition field
   * @return this for method chaining
   */
  UpdatePartitionSpec addYearField(String sourceName, String targetName);

  /**
   * Add a new partition field with year transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @return this for method chaining
   */
  UpdatePartitionSpec addYearField(String sourceName);

  /**
   * Add a new partition field with month transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @param targetName the name of this partition field
   * @return this for method chaining
   */
  UpdatePartitionSpec addMonthField(String sourceName, String targetName);

  /**
   * Add a new partition field with month transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @return this for method chaining
   */
  UpdatePartitionSpec addMonthField(String sourceName);

  /**
   * Add a new partition field with day transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @param targetName the name of this partition field
   * @return this for method chaining
   */
  UpdatePartitionSpec addDayField(String sourceName, String targetName);

  /**
   * Add a new partition field with day transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @return this for method chaining
   */
  UpdatePartitionSpec addDayField(String sourceName);

  /**
   * Add a new partition field with hour transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @param targetName the name of this partition field
   * @return this for method chaining
   */
  UpdatePartitionSpec addHourField(String sourceName, String targetName);

  /**
   * Add a new partition field with hour transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @return this for method chaining
   */
  UpdatePartitionSpec addHourField(String sourceName);

  /**
   * Add a new partition field with bucket transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @param numBuckets the number of buckets
   * @param targetName the name of this partition field
   * @return this for method chaining
   */
  UpdatePartitionSpec addBucketField(String sourceName, int numBuckets, String targetName);

  /**
   * Add a new partition field with bucket transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @param numBuckets the number of buckets
   * @return this for method chaining
   */
  UpdatePartitionSpec addBucketField(String sourceName, int numBuckets);

  /**
   * Add a new partition field with truncate transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @param width the width of truncation
   * @param targetName the name of this partition field
   * @return this for method chaining
   */
  UpdatePartitionSpec addTruncateField(String sourceName, int width, String targetName);

  /**
   * Add a new partition field with truncate transform to the partition spec.
   *
   * @param sourceName the field name of the source field in the {@link PartitionSpec spec's} table schema
   * @param width the width of truncation
   * @return this for method chaining
   */
  UpdatePartitionSpec addTruncateField(String sourceName, int width);

  /**
   * Rename a partition field in the partition spec.
   *
   * @param name the name of a partition field to be renamed
   * @param newName the new name of the partition field
   * @return this for method chaining
   */
  UpdatePartitionSpec renameField(String name, String newName);

  /**
   * Remove a partition field from the partition spec.
   * <p>
   * The partition field will be soft deleted for a table with V1 metadata and hard deleted in a higher version.
   *
   * @param name the name of a partition field to be removed
   * @return this for method chaining
   */
  UpdatePartitionSpec removeField(String name);

}
5 changes: 5 additions & 0 deletions api/src/main/java/org/apache/iceberg/transforms/Bucket.java
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,11 @@ public Type getResultType(Type sourceType) {
return Types.IntegerType.get();
}

@Override
public String getName() {
  // stable transform name; used by PartitionSpec.Builder as a prefix for the
  // redundant-partition dedup key, e.g. "bucket(3)"
  return "bucket";
}

private static class BucketInteger extends Bucket<Integer> {
private BucketInteger(int numBuckets) {
super(numBuckets);
Expand Down
5 changes: 5 additions & 0 deletions api/src/main/java/org/apache/iceberg/transforms/Dates.java
Original file line number Diff line number Diff line change
Expand Up @@ -125,4 +125,9 @@ public String toHumanString(Integer value) {
public String toString() {
  // human-readable form; `name` is presumably set per granularity at construction — not visible in this chunk
  return name;
}

@Override
public String getName() {
  // NOTE(review): a single name for all date-granularity transforms means year/month/day on the
  // same source column share one dedup key, so only one may be added — confirm this is intended
  return "date";
}
}
Loading