Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,13 @@ public int size() {
@Override
public <T> T get(int pos, Class<T> javaClass) {
if (transforms[pos] != null) {
return javaClass.cast(transforms[pos].apply(wrapped.get(pos, Object.class)));
Object value = wrapped.get(pos, Object.class);
if (value == null) {
// transforms function don't allow to handle null values, so just return null here.
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for adding a comment to clarify why you're making this change!

return null;
} else {
return javaClass.cast(transforms[pos].apply(value));
}
}
return wrapped.get(pos, javaClass);
}
Expand Down
104 changes: 104 additions & 0 deletions data/src/test/java/org/apache/iceberg/RecordWrapperTest.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg;

import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.StructLikeWrapper;
import org.junit.Assert;
import org.junit.Test;

import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;

/**
 * Base suite verifying that an engine-specific row wrapper exposes the same values as the generic
 * record wrapper for randomly generated data.
 *
 * <p>Concrete subclasses implement {@link #generateAndValidate(Schema, AssertMethod)} to produce
 * equivalent generic and engine-specific rows and compare them through {@link StructLikeWrapper}.
 */
public abstract class RecordWrapperTest {

  // Primitive fields covering most supported types, deliberately excluding time
  // (note: field ids 109 and 111 are intentionally skipped).
  private static final Types.StructType PRIMITIVE_WITHOUT_TIME = Types.StructType.of(
      required(100, "id", Types.LongType.get()),
      optional(101, "data", Types.StringType.get()),
      required(102, "b", Types.BooleanType.get()),
      optional(103, "i", Types.IntegerType.get()),
      required(104, "l", Types.LongType.get()),
      optional(105, "f", Types.FloatType.get()),
      required(106, "d", Types.DoubleType.get()),
      optional(107, "date", Types.DateType.get()),
      required(108, "ts_tz", Types.TimestampType.withZone()),
      required(110, "s", Types.StringType.get()),
      required(112, "fixed", Types.FixedType.ofLength(7)),
      optional(113, "bytes", Types.BinaryType.get()),
      required(114, "dec_9_0", Types.DecimalType.of(9, 0)),
      required(115, "dec_11_2", Types.DecimalType.of(11, 2)),
      required(116, "dec_38_10", Types.DecimalType.of(38, 10))// maximum precision
  );

  // Timestamps without a zone, kept in a separate struct so engines that lack this type
  // (e.g. Spark) can skip only this test.
  private static final Types.StructType TIMESTAMP_WITHOUT_ZONE = Types.StructType.of(
      required(101, "ts0", Types.TimestampType.withoutZone()),
      required(102, "ts1", Types.TimestampType.withoutZone())
  );

  // Time-of-day fields; protected so subclasses can override testTime with a custom comparison.
  protected static final Types.StructType TIME = Types.StructType.of(
      required(100, "time0", Types.TimeType.get()),
      optional(101, "time1", Types.TimeType.get())
  );

  @Test
  public void testSimpleStructWithoutTime() {
    generateAndValidate(new Schema(PRIMITIVE_WITHOUT_TIME.fields()));
  }

  @Test
  public void testTimestampWithoutZone() {
    generateAndValidate(new Schema(TIMESTAMP_WITHOUT_ZONE.fields()));
  }

  @Test
  public void testTime() {
    generateAndValidate(new Schema(TIME.fields()));
  }

  @Test
  public void testNestedSchema() {
    // Wrap the primitive struct five levels deep (level5 innermost, level1 outermost),
    // alternating required fields (odd ids) with optional fields (even ids).
    Types.StructType nested = Types.StructType.of(PRIMITIVE_WITHOUT_TIME.fields());
    for (int level = 5; level >= 2; level--) {
      String name = "level" + level;
      nested = Types.StructType.of(level % 2 == 1 ?
          required(level, name, nested) :
          optional(level, name, nested));
    }

    Types.StructType structType = Types.StructType.of(
        required(0, "id", Types.LongType.get()),
        required(1, "level1", nested));

    generateAndValidate(new Schema(structType.fields()));
  }

  // Default validation: the two wrapped structs must be exactly equal.
  private void generateAndValidate(Schema schema) {
    generateAndValidate(schema, Assert::assertEquals);
  }

  /** Comparison strategy for two wrapped structs; subclasses may relax exact equality. */
  public interface AssertMethod {
    void assertEquals(String message, StructLikeWrapper expected, StructLikeWrapper actual);
  }

  /**
   * Generates equivalent generic and engine-specific rows for the schema and compares them with
   * the given assertion.
   */
  protected abstract void generateAndValidate(Schema schema, AssertMethod assertMethod);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.flink;

import java.util.Iterator;
import org.apache.flink.table.data.RowData;
import org.apache.iceberg.RecordWrapperTest;
import org.apache.iceberg.Schema;
import org.apache.iceberg.StructLike;
import org.apache.iceberg.data.InternalRecordWrapper;
import org.apache.iceberg.data.RandomGenericData;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.flink.data.RandomRowData;
import org.apache.iceberg.util.StructLikeWrapper;
import org.junit.Assert;

public class TestRowDataWrapper extends RecordWrapperTest {

  /**
   * Flink's time type has been truncated to milliseconds while the generic records carry
   * microsecond values, so this override compares the values truncated to millisecond
   * precision instead of requiring exact equality.
   */
  @Override
  public void testTime() {
    generateAndValidate(new Schema(TIME.fields()), (message, expectedWrapper, actualWrapper) -> {
      for (int pos = 0; pos < TIME.fields().size(); pos++) {
        Object expected = expectedWrapper.get().get(pos, Object.class);
        Object actual = actualWrapper.get().get(pos, Object.class);
        if (expected == actual) {
          // Same reference (including both null): this field matches. Continue to the next
          // position — a plain `return` here would silently skip all remaining fields.
          continue;
        }

        if (expected == null || actual == null) {
          Assert.fail(String.format("The expected value is %s but actual value is %s", expected, actual));
        }

        // Values are microseconds of day (Iceberg time representation); divide by 1000 to
        // compare at the millisecond precision Flink preserves. (Dividing by 1000_000 would
        // only compare whole seconds, masking sub-second mismatches.)
        int expectedMilliseconds = (int) ((long) expected / 1000);
        int actualMilliseconds = (int) ((long) actual / 1000);
        Assert.assertEquals(message, expectedMilliseconds, actualMilliseconds);
      }
    });
  }

  /**
   * Generates the same number of generic records and Flink RowData rows from the same seed,
   * wraps both as {@link StructLike}, and compares them pairwise with the given assertion.
   */
  @Override
  protected void generateAndValidate(Schema schema, RecordWrapperTest.AssertMethod assertMethod) {
    int numRecords = 100;
    Iterable<Record> recordList = RandomGenericData.generate(schema, numRecords, 101L);
    Iterable<RowData> rowDataList = RandomRowData.generate(schema, numRecords, 101L);

    InternalRecordWrapper recordWrapper = new InternalRecordWrapper(schema.asStruct());
    RowDataWrapper rowDataWrapper = new RowDataWrapper(FlinkSchemaUtil.convert(schema), schema.asStruct());

    Iterator<Record> actual = recordList.iterator();
    Iterator<RowData> expected = rowDataList.iterator();

    StructLikeWrapper actualWrapper = StructLikeWrapper.forType(schema.asStruct());
    StructLikeWrapper expectedWrapper = StructLikeWrapper.forType(schema.asStruct());
    for (int i = 0; i < numRecords; i++) {
      Assert.assertTrue("Should have more records", actual.hasNext());
      Assert.assertTrue("Should have more RowData", expected.hasNext());

      StructLike recordStructLike = recordWrapper.wrap(actual.next());
      StructLike rowDataStructLike = rowDataWrapper.wrap(expected.next());

      assertMethod.assertEquals("Should have expected StructLike values",
          actualWrapper.set(recordStructLike), expectedWrapper.set(rowDataStructLike));
    }

    Assert.assertFalse("Shouldn't have more record", actual.hasNext());
    Assert.assertFalse("Shouldn't have more RowData", expected.hasNext());
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.spark.source;

import java.util.Iterator;
import org.apache.iceberg.RecordWrapperTest;
import org.apache.iceberg.Schema;
import org.apache.iceberg.StructLike;
import org.apache.iceberg.data.InternalRecordWrapper;
import org.apache.iceberg.data.RandomGenericData;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.spark.SparkSchemaUtil;
import org.apache.iceberg.spark.data.RandomData;
import org.apache.iceberg.util.StructLikeWrapper;
import org.apache.spark.sql.catalyst.InternalRow;
import org.junit.Assert;
import org.junit.Ignore;

public class TestInternalRowWrapper extends RecordWrapperTest {

  @Ignore
  @Override
  public void testTimestampWithoutZone() {
    // Spark does not support timestamp without zone.
  }

  @Ignore
  @Override
  public void testTime() {
    // Spark does not support time fields.
  }

  /**
   * Builds matching generic records and Spark InternalRows from the same seed, then checks,
   * pair by pair, that both wrappers expose the same struct values.
   */
  @Override
  protected void generateAndValidate(Schema schema, AssertMethod assertMethod) {
    final int total = 100;
    final long seed = 101L;
    Iterator<Record> recordIter = RandomGenericData.generate(schema, total, seed).iterator();
    Iterator<InternalRow> rowIter = RandomData.generateSpark(schema, total, seed).iterator();

    InternalRecordWrapper recordWrapper = new InternalRecordWrapper(schema.asStruct());
    InternalRowWrapper rowWrapper = new InternalRowWrapper(SparkSchemaUtil.convert(schema));

    StructLikeWrapper recordSide = StructLikeWrapper.forType(schema.asStruct());
    StructLikeWrapper rowSide = StructLikeWrapper.forType(schema.asStruct());

    for (int remaining = total; remaining > 0; remaining--) {
      Assert.assertTrue("Should have more records", recordIter.hasNext());
      Assert.assertTrue("Should have more InternalRow", rowIter.hasNext());

      StructLike wrappedRecord = recordWrapper.wrap(recordIter.next());
      StructLike wrappedRow = rowWrapper.wrap(rowIter.next());

      assertMethod.assertEquals("Should have expected StructLike values",
          recordSide.set(wrappedRecord), rowSide.set(wrappedRow));
    }

    Assert.assertFalse("Shouldn't have more record", recordIter.hasNext());
    Assert.assertFalse("Shouldn't have more InternalRow", rowIter.hasNext());
  }
}