6 changes: 0 additions & 6 deletions hudi-flink/pom.xml
@@ -144,12 +144,6 @@
  <artifactId>flink-hadoop-compatibility_${scala.binary.version}</artifactId>
  <version>${flink.version}</version>
  </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-avro</artifactId>
- <version>${flink.version}</version>
- <scope>provided</scope>
- </dependency>
  <dependency>
  <groupId>org.apache.flink</groupId>
  <artifactId>flink-parquet_${scala.binary.version}</artifactId>
AvroSchemaConverter.java
@@ -46,7 +46,7 @@
  * SQL API.
  *
  * <p>Note: Changes in this class need to be kept in sync with the corresponding runtime classes
- * {@link org.apache.flink.formats.avro.AvroRowDeserializationSchema} and {@link org.apache.flink.formats.avro.AvroRowSerializationSchema}.
+ * {@code org.apache.flink.formats.avro.AvroRowDeserializationSchema} and {@code org.apache.flink.formats.avro.AvroRowSerializationSchema}.
  *
  * <p>NOTE: reference from Flink release 1.12.0, should remove when Flink version upgrade to that.
  */
@@ -294,7 +294,7 @@ public static Schema convertToSchema(LogicalType logicalType, String rowName) {
    }
  }

- private static LogicalType extractValueTypeToAvroMap(LogicalType type) {
+ public static LogicalType extractValueTypeToAvroMap(LogicalType type) {
  LogicalType keyType;
  LogicalType valueType;
  if (type instanceof MapType) {
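The method made public in the hunk above is the piece the two converter classes below depend on. As orientation, a minimal sketch of its logic, assuming it follows the Flink 1.12 AvroSchemaConverter that this class is documented as referencing; the actual Hudi method body is outside this diff, and the wrapper class name is ours:

import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.LogicalTypeRoot;
import org.apache.flink.table.types.logical.MapType;
import org.apache.flink.table.types.logical.MultisetType;

// Sketch only: mirrors the Flink 1.12 implementation; the Hudi copy may differ in details.
class ExtractValueTypeSketch {
  // Returns the value type that an Avro map will carry for a Flink MAP or MULTISET type.
  static LogicalType extractValueTypeToAvroMap(LogicalType type) {
    LogicalType keyType;
    LogicalType valueType;
    if (type instanceof MapType) {
      MapType mapType = (MapType) type;
      keyType = mapType.getKeyType();
      valueType = mapType.getValueType();
    } else {
      // MULTISET<T> is written as an Avro map from the element to its count.
      MultisetType multisetType = (MultisetType) type;
      keyType = multisetType.getElementType();
      valueType = new IntType();
    }
    // Avro maps only support string keys, so CHAR/VARCHAR key types are required.
    if (keyType.getTypeRoot() != LogicalTypeRoot.VARCHAR
        && keyType.getTypeRoot() != LogicalTypeRoot.CHAR) {
      throw new UnsupportedOperationException(
          "Avro format doesn't support non-string map keys: " + keyType.asSummaryString());
    }
    return valueType;
  }
}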
AvroToRowDataConverters.java
@@ -49,8 +49,6 @@
  import java.util.List;
  import java.util.Map;

- import static org.apache.flink.formats.avro.typeutils.AvroSchemaConverter.extractValueTypeToAvroMap;
-
  /**
  * Tool class used to convert from Avro {@link GenericRecord} to {@link RowData}.
  *
@@ -188,7 +186,7 @@ private static AvroToRowDataConverter createMapConverter(LogicalType type) {
  final AvroToRowDataConverter keyConverter =
  createConverter(DataTypes.STRING().getLogicalType());
  final AvroToRowDataConverter valueConverter =
- createNullableConverter(extractValueTypeToAvroMap(type));
+ createNullableConverter(AvroSchemaConverter.extractValueTypeToAvroMap(type));

  return avroObject -> {
  final Map<?, ?> map = (Map<?, ?>) avroObject;
RowDataToAvroConverters.java
@@ -39,8 +39,6 @@
  import java.util.List;
  import java.util.Map;

- import static org.apache.flink.formats.avro.typeutils.AvroSchemaConverter.extractValueTypeToAvroMap;
-
  /**
  * Tool class used to convert from {@link RowData} to Avro {@link GenericRecord}.
  *
@@ -279,7 +277,7 @@ public Object convert(Schema schema, Object object) {
  }

  private static RowDataToAvroConverter createMapConverter(LogicalType type) {
- LogicalType valueType = extractValueTypeToAvroMap(type);
+ LogicalType valueType = AvroSchemaConverter.extractValueTypeToAvroMap(type);
  final ArrayData.ElementGetter valueGetter = ArrayData.createElementGetter(valueType);
  final RowDataToAvroConverter valueConverter = createConverter(valueType);
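With the static import gone, both tool classes resolve the helper through AvroSchemaConverter, which is why the method had to become public. A usage sketch of the resulting RowData <-> GenericRecord round trip, assuming the Hudi copies keep the Flink 1.12 entry points (convertToSchema, createConverter, createRowConverter); the wrapper class name is ours, and the converter classes' package (and therefore their imports) is an assumption:

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.types.logical.RowType;
// Imports for AvroSchemaConverter, RowDataToAvroConverters and AvroToRowDataConverters are
// omitted: they depend on the package Hudi keeps the copied classes in, which this diff does not show.

class AvroRowDataRoundTripSketch {
  public static void main(String[] args) {
    RowType rowType = (RowType) DataTypes.ROW(
        DataTypes.FIELD("id", DataTypes.INT()),
        DataTypes.FIELD("name", DataTypes.STRING())).getLogicalType();

    // Derive the Avro schema from the Flink row type (two-arg variant shown in this diff).
    Schema schema = AvroSchemaConverter.convertToSchema(rowType, "record");

    // RowData -> GenericRecord
    RowData row = GenericRowData.of(1, StringData.fromString("hudi"));
    GenericRecord record = (GenericRecord)
        RowDataToAvroConverters.createConverter(rowType).convert(schema, row);

    // GenericRecord -> RowData
    RowData back = (RowData)
        AvroToRowDataConverters.createRowConverter(rowType).convert(record);
    System.out.println(back);
  }
}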
22 changes: 9 additions & 13 deletions packaging/hudi-flink-bundle/pom.xml
@@ -130,7 +130,6 @@
  <inclide>com.esotericsoftware:kryo-shaded</inclide>

  <include>org.apache.flink:flink-hadoop-compatibility_${scala.binary.version}</include>
- <include>org.apache.flink:flink-avro</include>
  <include>org.apache.flink:flink-json</include>
  <include>org.apache.flink:flink-parquet_${scala.binary.version}</include>

@@ -223,6 +222,11 @@
  <pattern>org.eclipse.jetty.</pattern>
  <shadedPattern>${flink.bundle.shade.prefix}org.apache.jetty.</shadedPattern>
  </relocation>
+ <!-- Shade kryo-shaded because it may conflict with kryo used by flink -->
+ <relocation>
+ <pattern>com.esotericsoftware.kryo.</pattern>
+ <shadedPattern>${flink.bundle.shade.prefix}com.esotericsoftware.kryo.</shadedPattern>
+ </relocation>
  </relocations>
  <filters>
  <filter>
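The comment added with the relocation above states the motivation: the bundled kryo-shaded classes could otherwise clash with the Kryo version Flink itself ships. A small illustrative sketch of the effect; SHADE_PREFIX stands for the resolved value of ${flink.bundle.shade.prefix}, which is configured elsewhere in this pom and is left as a labeled placeholder here:

class KryoRelocationSketch {
  // Assumption: replace with the resolved value of ${flink.bundle.shade.prefix}.
  private static final String SHADE_PREFIX = "…";

  public static void main(String[] args) throws Exception {
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    // Kryo on Flink's own classpath keeps its original package name.
    Class<?> flinkKryo = cl.loadClass("com.esotericsoftware.kryo.Kryo");
    // The copy inside hudi-flink-bundle now lives under the shade prefix,
    // so the two can no longer conflict at runtime.
    Class<?> bundledKryo = cl.loadClass(SHADE_PREFIX + "com.esotericsoftware.kryo.Kryo");
    System.out.println(flinkKryo != bundledKryo); // expected: true when the bundle is on the classpath
  }
}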
@@ -329,12 +333,6 @@
  <version>${flink.version}</version>
  <scope>compile</scope>
  </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-avro</artifactId>
- <version>${flink.version}</version>
- <scope>compile</scope>
- </dependency>
  <dependency>
  <groupId>org.apache.flink</groupId>
  <artifactId>flink-parquet_${scala.binary.version}</artifactId>
@@ -459,6 +457,10 @@
  <groupId>org.pentaho</groupId>
  <artifactId>*</artifactId>
  </exclusion>
+ <exclusion>
+ <groupId>com.esotericsoftware</groupId>
+ <artifactId>kryo-shaded</artifactId>
+ </exclusion>
  </exclusions>
  </dependency>
  <dependency>
@@ -575,12 +577,6 @@
  <version>4.0.2</version>
  </dependency>

- <dependency>
- <groupId>com.esotericsoftware.kryo</groupId>
- <artifactId>kryo</artifactId>
- <version>2.24.0</version>
- </dependency>
-
  <!-- ORC -->
  <dependency>
  <groupId>org.apache.orc</groupId>