Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -24,16 +24,11 @@
import java.nio.ByteBuffer;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.flink.table.data.ArrayData;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.MapData;
import org.apache.flink.table.data.RawValueData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;

import org.apache.flink.table.data.*;
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In Iceberg, we usually don't use `*` to import a package; it's clearer to import the specific classes one by one.

import org.apache.iceberg.MetadataColumns;
import org.apache.iceberg.Schema;
import org.apache.iceberg.parquet.ParquetValueReader;
Expand Down Expand Up @@ -483,7 +478,7 @@ protected void addElement(ReusableArrayData reused, E element) {
@Override
protected ArrayData buildList(ReusableArrayData reused) {
  // Record how many elements were actually written into the reusable buffer,
  // then return an immutable snapshot so the buffer can be safely recycled
  // for the next row without corrupting previously emitted lists.
  reused.setNumElements(writePos);
  ArrayData snapshot = reused.buildGenericArrayData();
  return snapshot;
}
}

Expand Down Expand Up @@ -541,7 +536,7 @@ protected void addPair(ReusableMapData map, K key, V value) {
@Override
protected MapData buildMap(ReusableMapData reused) {
  // Seal the reusable buffer at the number of pairs written, then copy it
  // into an immutable MapData so the buffer can be reused for the next row.
  reused.setNumElements(writePos);
  MapData snapshot = reused.buildGenericMapData();
  return snapshot;
}
}

Expand Down Expand Up @@ -634,6 +629,14 @@ public void setNumElements(int numElements) {
values.setNumElements(numElements);
}

/**
 * Copies the first {@code numElements} key/value pairs out of the reusable
 * buffers into an immutable {@link GenericMapData} snapshot.
 */
public GenericMapData buildGenericMapData() {
  Map<Object, Object> entries = Maps.newHashMapWithExpectedSize(numElements);
  // Only the live prefix of the reusable key/value arrays is valid; anything
  // past numElements is stale data from a previous row.
  for (int pos = 0; pos < numElements; pos += 1) {
    entries.put(keys.values[pos], values.values[pos]);
  }
  return new GenericMapData(entries);
}

@Override
public int size() {
return numElements;
Expand Down Expand Up @@ -675,6 +678,10 @@ public void setNumElements(int numElements) {
this.numElements = numElements;
}

/**
 * Returns an immutable {@link GenericArrayData} holding a copy of the live
 * prefix of the reusable buffer (elements {@code 0..numElements-1}).
 */
public GenericArrayData buildGenericArrayData() {
  // Copy only the valid range; slots beyond numElements may hold stale values.
  return new GenericArrayData(Arrays.copyOfRange(values, 0, numElements));
}

@Override
public int size() {
return numElements;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import org.apache.iceberg.data.RandomGenericData;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.flink.FlinkSchemaUtil;
import org.apache.iceberg.flink.TestFixtures;
import org.apache.iceberg.flink.TestHelpers;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Types;
Expand Down Expand Up @@ -61,6 +62,37 @@ protected List<Row> run(
return runFormat(formatBuilder.tableLoader(tableLoader()).buildFormat());
}

@Test
public void testReadMap() throws Exception {
  // Verifies that map-typed columns survive a write/read round trip.
  // NOTE: local variables use lowerCamelCase; UPPER_SNAKE_CASE is reserved
  // for static final constants (Google/Iceberg Java style).
  Schema mapSchema = new Schema(
      required(0, "id", Types.LongType.get()),
      required(1, "data", Types.MapType.ofOptional(2, 3,
          Types.StringType.get(), Types.StringType.get())));

  Table table = catalog.createTable(TestFixtures.TABLE_IDENTIFIER, mapSchema);

  // Seeded generation (0L) keeps the single expected record deterministic.
  List<Record> expectedRecords = RandomGenericData.generate(mapSchema, 1, 0L);

  new GenericAppenderHelper(table, fileFormat, TEMPORARY_FOLDER).appendToTable(expectedRecords);

  TestHelpers.assertRecords(run(), expectedRecords, mapSchema);
}

@Test
public void testReadArray() throws Exception {
  // Verifies that list-typed columns survive a write/read round trip.
  // NOTE: local variables use lowerCamelCase; UPPER_SNAKE_CASE is reserved
  // for static final constants (Google/Iceberg Java style).
  Schema arraySchema = new Schema(
      required(0, "id", Types.LongType.get()),
      required(1, "data", Types.ListType.ofRequired(2, Types.StringType.get())));

  Table table = catalog.createTable(TestFixtures.TABLE_IDENTIFIER, arraySchema);

  // Seeded generation (0L) keeps the single expected record deterministic.
  List<Record> expectedRecords = RandomGenericData.generate(arraySchema, 1, 0L);

  new GenericAppenderHelper(table, fileFormat, TEMPORARY_FOLDER).appendToTable(expectedRecords);

  TestHelpers.assertRecords(run(), expectedRecords, arraySchema);
}

@Test
public void testNestedProjection() throws Exception {
Schema schema = new Schema(
Expand Down