44 changes: 35 additions & 9 deletions build.gradle
@@ -364,6 +364,19 @@ project(':iceberg-hive-metastore') {
}

project(':iceberg-mr') {


repositories {
ivy {
url 'http://artifactory.corp.linkedin.com:8081/artifactory/release'
layout 'pattern', {
ivy '[organisation]/[module]/[revision]/[module]-[revision].ivy'
artifact '[organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext]'
m2compatible = true
}
}
}

configurations {
testCompile {
exclude group: 'org.apache.parquet', module: 'parquet-hadoop-bundle'
@@ -381,8 +394,7 @@ project(':iceberg-mr') {
compileOnly("org.apache.hadoop:hadoop-client") {
exclude group: 'org.apache.avro', module: 'avro'
}

compileOnly("org.apache.hive:hive-exec::core") {
compileOnly(group: 'com.linkedin.hive', name: 'hive-exec', version: '1.1.0.200', classifier: 'core') {
exclude group: 'com.google.code.findbugs', module: 'jsr305'
exclude group: 'com.google.guava'
exclude group: 'com.google.protobuf', module: 'protobuf-java'
@@ -393,8 +405,18 @@ project(':iceberg-mr') {
exclude group: 'org.pentaho' // missing dependency
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
}
compileOnly("org.apache.hive:hive-metastore")
compileOnly("org.apache.hive:hive-serde")
compileOnly(group: 'com.linkedin.hive', name: 'hive-service', version: '1.1.0.200') {
exclude group: 'com.google.code.findbugs', module: 'jsr305'
exclude group: 'com.google.guava'
exclude group: 'com.google.protobuf', module: 'protobuf-java'
exclude group: 'org.apache.avro'
exclude group: 'org.apache.calcite.avatica'
exclude group: 'org.apache.hive', module: 'hive-llap-tez'
exclude group: 'org.apache.logging.log4j'
exclude group: 'org.pentaho' // missing dependency
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'com.linkedin.hive', module: 'hive-exec'
}

testCompile project(path: ':iceberg-data', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
@@ -403,14 +425,18 @@ project(':iceberg-mr') {

testCompile("org.apache.avro:avro:1.9.2")
testCompile("org.apache.calcite:calcite-core")
testCompile("com.esotericsoftware:kryo-shaded:4.0.2")
testCompile("com.esotericsoftware.kryo:kryo:2.22")
testCompile("com.fasterxml.jackson.core:jackson-annotations:2.6.5")
testCompile("com.klarna:hiverunner:5.2.1") {
testCompile("com.klarna:hiverunner:3.2.1") {
exclude group: 'javax.jms', module: 'jms'
exclude group: 'org.apache.hive', module: 'hive-exec'
exclude group: 'org.codehaus.jettison', module: 'jettison'
exclude group: 'org.apache.calcite.avatica'
exclude group: 'org.apache.hive', module: 'hive-metastore'
exclude group: 'org.apache.hive', module: 'hive-serde'

}
testCompile("org.apache.commons:commons-lang3:3.1")
}
}

@@ -424,7 +450,7 @@ project(':iceberg-hive-runtime') {
exclude group: 'com.github.stephenc.findbugs'
exclude group: 'commons-pool'
exclude group: 'javax.annotation'
exclude group: 'javax.xml.bind'
exclude group: 'org.apache.commons'
exclude group: 'org.slf4j'
exclude group: 'org.xerial.snappy'
@@ -434,7 +460,7 @@
dependencies {
compile project(':iceberg-mr')
}

shadowJar {
configurations = [project.configurations.compile]

@@ -448,7 +474,7 @@

// Relocate dependencies to avoid conflicts
relocate 'org.apache.avro', 'org.apache.iceberg.shaded.org.apache.avro'
relocate 'org.apache.parquet', 'org.apache.iceberg.shaded.org.apache.parquet'
relocate 'com.google', 'org.apache.iceberg.shaded.com.google'
relocate 'com.fasterxml', 'org.apache.iceberg.shaded.com.fasterxml'
relocate 'com.github.benmanes', 'org.apache.iceberg.shaded.com.github.benmanes'
24 changes: 2 additions & 22 deletions hive-metastore/src/test/resources/hive-schema-3.1.0.derby.sql
@@ -1,24 +1,3 @@
--
-- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements. See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to You under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
-- This file was copied from Apache Hive, at:
-- https://github.com/apache/hive/blob/master/standalone-metastore/metastore-server/src/main/sql/derby/hive-schema-3.1.0.derby.sql
--
-- This has been modified slightly for compatibility with older Hive versions.
--
-- Timestamp: 2011-09-22 15:32:02.024
-- Source database is: /home/carl/Work/repos/hive1/metastore/scripts/upgrade/derby/mdb
-- Connection URL is: jdbc:derby:/home/carl/Work/repos/hive1/metastore/scripts/upgrade/derby/mdb
@@ -96,7 +75,7 @@ CREATE TABLE "APP"."COLUMNS" ("SD_ID" BIGINT NOT NULL, "COMMENT" VARCHAR(256), "

CREATE TABLE "APP"."ROLES" ("ROLE_ID" BIGINT NOT NULL, "CREATE_TIME" INTEGER NOT NULL, "OWNER_NAME" VARCHAR(128), "ROLE_NAME" VARCHAR(128));

CREATE TABLE "APP"."TBLS" ("TBL_ID" BIGINT NOT NULL, "CREATE_TIME" INTEGER NOT NULL, "DB_ID" BIGINT, "LAST_ACCESS_TIME" INTEGER NOT NULL, "OWNER" VARCHAR(767), "OWNER_TYPE" VARCHAR(10), "RETENTION" INTEGER NOT NULL, "SD_ID" BIGINT, "TBL_NAME" VARCHAR(256), "TBL_TYPE" VARCHAR(128), "VIEW_EXPANDED_TEXT" LONG VARCHAR, "VIEW_ORIGINAL_TEXT" LONG VARCHAR, "IS_REWRITE_ENABLED" CHAR(1) NOT NULL DEFAULT 'N');
CREATE TABLE "APP"."TBLS" ("TBL_ID" BIGINT NOT NULL, "CREATE_TIME" INTEGER NOT NULL, "DB_ID" BIGINT, "LAST_ACCESS_TIME" INTEGER NOT NULL, "OWNER" VARCHAR(767), "OWNER_TYPE" VARCHAR(10), "RETENTION" INTEGER NOT NULL, "SD_ID" BIGINT, "TBL_NAME" VARCHAR(256), "TBL_TYPE" VARCHAR(128), "VIEW_EXPANDED_TEXT" CLOB, "VIEW_ORIGINAL_TEXT" CLOB, "IS_REWRITE_ENABLED" CHAR(1) NOT NULL DEFAULT 'N');

CREATE TABLE "APP"."PARTITION_KEYS" ("TBL_ID" BIGINT NOT NULL, "PKEY_COMMENT" VARCHAR(4000), "PKEY_NAME" VARCHAR(128) NOT NULL, "PKEY_TYPE" VARCHAR(767) NOT NULL, "INTEGER_IDX" INTEGER NOT NULL);

@@ -724,3 +703,4 @@ CREATE INDEX IDX_RUNTIME_STATS_CREATE_TIME ON RUNTIME_STATS(CREATE_TIME);
-- Record schema version. Should be the last step in the init script
-- -----------------------------------------------------------------
INSERT INTO "APP"."VERSION" (VER_ID, SCHEMA_VERSION, VERSION_COMMENT) VALUES (1, '3.1.0', 'Hive release version 3.1.0');

mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergFilterFactory.java
@@ -24,25 +24,18 @@
import java.sql.Timestamp;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.io.sarg.ExpressionTree;
import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.iceberg.common.DynClasses;
import org.apache.iceberg.common.DynFields;
import org.apache.iceberg.expressions.Expression;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.util.DateTimeUtil;

import static org.apache.iceberg.expressions.Expressions.and;
import static org.apache.iceberg.expressions.Expressions.equal;
import static org.apache.iceberg.expressions.Expressions.greaterThanOrEqual;
import static org.apache.iceberg.expressions.Expressions.in;
import static org.apache.iceberg.expressions.Expressions.isNull;
import static org.apache.iceberg.expressions.Expressions.lessThan;
import static org.apache.iceberg.expressions.Expressions.lessThanOrEqual;
import static org.apache.iceberg.expressions.Expressions.not;
import static org.apache.iceberg.expressions.Expressions.or;
import static org.apache.iceberg.expressions.Expressions.*;


public class HiveIcebergFilterFactory {
@@ -139,7 +132,7 @@ private static Object leafToLiteral(PredicateLeaf leaf) {
case TIMESTAMP:
return microsFromTimestamp((Timestamp) LITERAL_FIELD.get(leaf));
case DECIMAL:
return hiveDecimalToBigDecimal((HiveDecimalWritable) LITERAL_FIELD.get(leaf));
return hiveDecimalToBigDecimal((HiveDecimal) LITERAL_FIELD.get(leaf));

default:
throw new UnsupportedOperationException("Unknown type: " + leaf.getType());
@@ -158,7 +151,7 @@ private static List<Object> leafToLiteralList(PredicateLeaf leaf) {
.collect(Collectors.toList());
case DECIMAL:
return LITERAL_LIST_FIELD.get(leaf).stream()
.map(value -> hiveDecimalToBigDecimal((HiveDecimalWritable) value))
.map(value -> hiveDecimalToBigDecimal((HiveDecimal) value))
.collect(Collectors.toList());
case TIMESTAMP:
return LITERAL_LIST_FIELD.get(leaf).stream()
@@ -169,15 +162,27 @@ private static List<Object> leafToLiteralList(PredicateLeaf leaf) {
}
}

private static BigDecimal hiveDecimalToBigDecimal(HiveDecimalWritable hiveDecimalWritable) {
return hiveDecimalWritable.getHiveDecimal().bigDecimalValue().setScale(hiveDecimalWritable.scale());
private static BigDecimal hiveDecimalToBigDecimal(HiveDecimal literal) {
return literal.bigDecimalValue();
//return hiveDecimalWritable.getHiveDecimal().bigDecimalValue().setScale(hiveDecimalWritable.getScale());
}

private static int daysFromDate(Date date) {
return DateTimeUtil.daysFromDate(date.toLocalDate());
}

private static int daysFromTimestamp(Timestamp timestamp) {
private static int daysFromTimestamp(Object literal) {
Timestamp timestamp;
if (literal instanceof DateWritable) {
Date date = ((DateWritable)literal).get();
timestamp = new Timestamp(date.getTime());
} else if(literal instanceof Date) {
timestamp = new Timestamp(((Date)literal).getTime());
} else if(literal instanceof Timestamp) {
timestamp = (Timestamp)literal;
} else {
throw new UnsupportedOperationException("Unknown object for DATE: " + literal.getClass().getSimpleName());
}
return DateTimeUtil.daysFromInstant(timestamp.toInstant());
}

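
Note on the daysFromTimestamp rewrite above: widening the parameter to Object appears to be needed because the Hive 1.x line this PR targets can hand the DATE leaf literal over as a DateWritable, a java.sql.Date, or a java.sql.Timestamp. A minimal standalone sketch of the same dispatch, assuming nothing beyond the JDK (the DateWritable branch is omitted so it compiles without Hive on the classpath; the real method first unwraps it with ((DateWritable) literal).get(), and delegates to Iceberg's DateTimeUtil.daysFromInstant rather than the plain UTC division used here):

import java.sql.Date;
import java.sql.Timestamp;
import java.util.concurrent.TimeUnit;

public final class DateLeafSketch {

  // Normalize the literal shapes a DATE predicate leaf may carry
  // into an epoch-day count.
  static int daysFromDateLiteral(Object literal) {
    final Timestamp timestamp;
    if (literal instanceof Timestamp) {
      timestamp = (Timestamp) literal;
    } else if (literal instanceof Date) {
      timestamp = new Timestamp(((Date) literal).getTime());
    } else {
      throw new UnsupportedOperationException(
          "Unknown object for DATE: " + literal.getClass().getSimpleName());
    }
    // Epoch millis -> whole days since 1970-01-01.
    return (int) TimeUnit.MILLISECONDS.toDays(timestamp.getTime());
  }

  public static void main(String[] args) {
    // Prints 18263 on a UTC-configured JVM (Date.valueOf is zone-sensitive).
    System.out.println(daysFromDateLiteral(Date.valueOf("2020-01-02")));
  }
}
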
mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergInputFormat.java
@@ -24,21 +24,30 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
import org.apache.hadoop.hive.ql.io.IOConstants;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.iceberg.Schema;
import org.apache.iceberg.SchemaParser;
import org.apache.iceberg.common.DynMethods;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.expressions.Expression;
import org.apache.iceberg.mr.InputFormatConfig;
import org.apache.iceberg.mr.SerializationUtil;
import org.apache.iceberg.mr.mapred.Container;
import org.apache.iceberg.mr.mapred.MapredIcebergInputFormat;
import org.apache.iceberg.mr.mapreduce.IcebergSplit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.iceberg.mr.hive.HiveIcebergSerDe.*;


public class HiveIcebergInputFormat extends MapredIcebergInputFormat<Record>
implements CombineHiveInputFormat.AvoidSplitCombination {

@@ -56,6 +65,7 @@ public class HiveIcebergInputFormat extends MapredIcebergInputFormat<Record>
Configuration.class, ExprNodeGenericFuncDesc.class)
.orNoop()
.buildStatic();
static final String SPLIT_LOCATION = "iceberg.hive.split.location";

@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
@@ -72,12 +82,24 @@ public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
}
}

String location = job.get(InputFormatConfig.TABLE_LOCATION);
String location = job.get(SPLIT_LOCATION);
return Arrays.stream(super.getSplits(job, numSplits))
.map(split -> new HiveIcebergSplit((IcebergSplit) split, location))
.toArray(InputSplit[]::new);
}

@Override
public RecordReader<Void, Container<Record>> getRecordReader(InputSplit split, JobConf job, Reporter reporter)
throws IOException {
if (job.get(IOConstants.SCHEMA_EVOLUTION_COLUMNS) != null) {
String tableColumns = job.get(IOConstants.SCHEMA_EVOLUTION_COLUMNS);
String tableColumnTypes = job.get(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES);
Schema readSchema = getSchemaFromTypeString(tableColumns, tableColumnTypes);
job.set(InputFormatConfig.READ_SCHEMA, SchemaParser.toJson(readSchema));
}
return super.getRecordReader(split, job, reporter);
}

@Override
public boolean shouldSkipCombine(Path path, Configuration conf) {
return true;
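
The getRecordReader override above piggybacks on Hive's schema-evolution job properties to derive the Iceberg read schema, presumably so projections resolve against the current table schema rather than individual file schemas. A hedged sketch of the wiring it expects; the literal keys below are the assumed string values of IOConstants.SCHEMA_EVOLUTION_COLUMNS and IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES:

import org.apache.hadoop.mapred.JobConf;

public class ReadSchemaWiringSketch {
  public static void main(String[] args) {
    JobConf job = new JobConf();
    // Hive populates these for readers that support schema evolution.
    job.set("schema.evolution.columns", "customer_id,first_name");
    job.set("schema.evolution.columns.types", "bigint:string");
    // With both present, getRecordReader builds a read schema via
    // HiveIcebergSerDe.getSchemaFromTypeString(...) and stores its JSON
    // under InputFormatConfig.READ_SCHEMA before calling the parent reader.
  }
}
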
35 changes: 28 additions & 7 deletions mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergSerDe.java
@@ -19,19 +19,25 @@

package org.apache.iceberg.mr.hive;

import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import javax.annotation.Nullable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.io.IOConstants;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Writable;
import org.apache.iceberg.Schema;
import org.apache.iceberg.SchemaParser;
import org.apache.iceberg.Table;
import org.apache.iceberg.hive.legacy.HiveTypeUtil;
import org.apache.iceberg.mr.Catalogs;
import org.apache.iceberg.mr.InputFormatConfig;
import org.apache.iceberg.mr.hive.serde.objectinspector.IcebergObjectInspector;
import org.apache.iceberg.mr.mapred.Container;

@@ -41,15 +47,22 @@ public class HiveIcebergSerDe extends AbstractSerDe {

@Override
public void initialize(@Nullable Configuration configuration, Properties serDeProperties) throws SerDeException {
Schema tableSchema;
if (configuration.get(InputFormatConfig.TABLE_SCHEMA) != null) {
tableSchema = SchemaParser.fromJson(configuration.get(InputFormatConfig.TABLE_SCHEMA));
Schema readSchema;
if (serDeProperties.getProperty(serdeConstants.LIST_COLUMNS) != null) {
String tableColumns = serDeProperties.getProperty(serdeConstants.LIST_COLUMNS);
String tableColumnTypes = serDeProperties.getProperty(serdeConstants.LIST_COLUMN_TYPES);
if (configuration.get(IOConstants.SCHEMA_EVOLUTION_COLUMNS) != null &&
configuration.get(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES) != null) {
tableColumns = configuration.get(IOConstants.SCHEMA_EVOLUTION_COLUMNS);
tableColumnTypes = configuration.get(IOConstants.SCHEMA_EVOLUTION_COLUMNS_TYPES);
}
readSchema = getSchemaFromTypeString(tableColumns, tableColumnTypes);
} else {
Table table = Catalogs.loadTable(configuration, serDeProperties);
tableSchema = table.schema();
readSchema = table.schema();
}
try {
this.inspector = IcebergObjectInspector.create(tableSchema);
this.inspector = IcebergObjectInspector.create(readSchema);
} catch (Exception e) {
throw new SerDeException(e);
}
Expand Down Expand Up @@ -79,4 +92,12 @@ public Object deserialize(Writable writable) {
public ObjectInspector getObjectInspector() {
return inspector;
}

public static Schema getSchemaFromTypeString(String tableColumns, String hiveTypeProperty) {
List<TypeInfo> typeInfoList = TypeInfoUtils.getTypeInfosFromTypeString(hiveTypeProperty);
List<String> colNames = Arrays.asList(tableColumns.split(","));
TypeInfo typeInfo = TypeInfoFactory.getStructTypeInfo(colNames, typeInfoList);
return new Schema(HiveTypeUtil.convert(typeInfo).asNestedType().asStructType().fields());
}

}
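
A quick usage sketch for the new getSchemaFromTypeString helper: it reassembles Hive's comma-separated column names and colon-separated type string into a struct TypeInfo, which HiveTypeUtil.convert (from the iceberg-hive legacy module this PR depends on; behavior assumed from the call site) then maps to Iceberg types. The TypeInfo half uses only stock Hive serde utilities:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeStringDemo {
  public static void main(String[] args) {
    // What the SerDe reads from serdeConstants.LIST_COLUMNS / LIST_COLUMN_TYPES.
    String tableColumns = "customer_id,first_name";
    String tableColumnTypes = "bigint:string";

    List<TypeInfo> types = TypeInfoUtils.getTypeInfosFromTypeString(tableColumnTypes);
    List<String> names = Arrays.asList(tableColumns.split(","));
    TypeInfo struct = TypeInfoFactory.getStructTypeInfo(names, types);

    // Prints: struct<customer_id:bigint,first_name:string>
    System.out.println(struct);
  }
}
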
mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
@@ -43,6 +43,7 @@
public class HiveIcebergStorageHandler implements HiveStoragePredicateHandler, HiveStorageHandler {

private static final String NAME = "name";
private static final String LOCATION = "location";

private Configuration conf;

@@ -79,6 +80,7 @@ public void configureInputJobProperties(TableDesc tableDesc, Map<String, String>
map.put(InputFormatConfig.TABLE_IDENTIFIER, props.getProperty(NAME));
map.put(InputFormatConfig.TABLE_LOCATION, table.location());
map.put(InputFormatConfig.TABLE_SCHEMA, SchemaParser.toJson(table.schema()));
map.put(HiveIcebergInputFormat.SPLIT_LOCATION, props.getProperty(LOCATION));
}

@Override
@@ -120,6 +120,12 @@ public void testScanEmptyTable() throws IOException {
Assert.assertEquals(0, rows.size());
}

@Test
public void testScanEmptyTableScan() throws IOException {
createTable("customers", CUSTOMER_SCHEMA, CUSTOMER_RECORDS);
List<Object[]> rows = shell.executeStatement("SELECT * FROM default.customers where customer_id=123L");
Assert.assertEquals(0, rows.size());
}
@Test
public void testScanTable() throws IOException {
createTable("customers", CUSTOMER_SCHEMA, CUSTOMER_RECORDS);