diff --git a/build.gradle b/build.gradle
index 21e0df5d4f7b..d9cc43c4d62c 100644
--- a/build.gradle
+++ b/build.gradle
@@ -87,6 +87,7 @@ subprojects {
     all {
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.mortbay.jetty'
+      exclude group: 'org.pentaho', module: 'pentaho-aggdesigner-algorithm'
 
       resolutionStrategy {
         force 'com.fasterxml.jackson.module:jackson-module-scala_2.11:2.10.2'
@@ -342,6 +343,44 @@ project(':iceberg-mr') {
     }
 
     compileOnly("org.apache.hive:hive-serde")
+    compileOnly("org.apache.hive:hive-exec::core") {
+      exclude group: 'org.apache.avro', module: 'avro'
+      exclude group: 'org.slf4j', module: 'slf4j-log4j12'
+      exclude group: 'org.pentaho' // missing dependency
+      exclude group: 'org.apache.hive', module: 'hive-llap-tez'
+      exclude group: 'org.apache.logging.log4j'
+      exclude group: 'com.google.protobuf', module: 'protobuf-java'
+      exclude group: 'org.apache.calcite'
+      exclude group: 'org.apache.calcite.avatica'
+      exclude group: 'com.google.code.findbugs', module: 'jsr305'
+      exclude group: 'com.google.guava'
+    }
+
+    compileOnly "org.apache.hive:hive-metastore"
+    compileOnly "org.apache.hive:hive-serde"
+
+    testCompile("com.klarna:hiverunner:5.2.1") {
+      exclude group: 'javax.jms', module: 'jms'
+      exclude group: 'org.apache.hive', module: 'hive-exec'
+      exclude group: 'org.codehaus.jettison', module: 'jettison'
+      exclude group: 'org.apache.calcite.avatica'
+    }
+
+    testCompile("org.apache.hive:hive-exec::core") {
+      exclude group: 'org.apache.avro', module: 'avro'
+      exclude group: 'org.slf4j', module: 'slf4j-log4j12'
+      exclude group: 'org.pentaho' // missing dependency
+      exclude group: 'org.apache.hive', module: 'hive-llap-tez'
+      exclude group: 'org.apache.logging.log4j'
+      exclude group: 'com.google.protobuf', module: 'protobuf-java'
+      exclude group: 'org.apache.calcite.avatica'
+      exclude group: 'com.google.code.findbugs', module: 'jsr305'
+    }
+
+    testCompile("org.apache.calcite:calcite-core")
+    testCompile("com.esotericsoftware.kryo:kryo:2.24.0")
+    testCompile("com.fasterxml.jackson.core:jackson-annotations:2.6.5")
+
     testCompile project(path: ':iceberg-data', configuration: 'testArtifacts')
     testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
     testCompile project(path: ':iceberg-core', configuration: 'testArtifacts')
diff --git a/mr/dependencies.lock b/mr/dependencies.lock
index 9fbd7551d7a1..99c48ccf7e10 100644
--- a/mr/dependencies.lock
+++ b/mr/dependencies.lock
@@ -618,6 +618,44 @@
                 "asm:asm-commons"
             ]
         },
+        "ch.qos.logback:logback-classic": {
+            "locked": "1.0.9",
+            "transitive": [
+                "co.cask.tephra:tephra-core",
+                "org.apache.twill:twill-core",
+                "org.apache.twill:twill-zookeeper"
+            ]
+        },
+        "ch.qos.logback:logback-core": {
+            "locked": "1.0.9",
+            "transitive": [
+                "ch.qos.logback:logback-classic",
+                "co.cask.tephra:tephra-core",
+                "org.apache.twill:twill-core",
+                "org.apache.twill:twill-zookeeper"
+            ]
+        },
+        "co.cask.tephra:tephra-api": {
+            "locked": "0.6.0",
+            "transitive": [
+                "co.cask.tephra:tephra-core",
+                "co.cask.tephra:tephra-hbase-compat-1.0",
+                "org.apache.hive:hive-metastore"
+            ]
+        },
+        "co.cask.tephra:tephra-core": {
+            "locked": "0.6.0",
+            "transitive": [
+                "co.cask.tephra:tephra-hbase-compat-1.0",
+                "org.apache.hive:hive-metastore"
+            ]
+        },
+        "co.cask.tephra:tephra-hbase-compat-1.0": {
+            "locked": "0.6.0",
+            "transitive": [
+                "org.apache.hive:hive-metastore"
+            ]
+        },
         "com.fasterxml.jackson.core:jackson-annotations": {
             "locked": "2.10.2",
             "transitive": [
@@ -656,6 +694,10 @@
         "com.github.stephenc.findbugs:findbugs-annotations": {
"locked": "1.3.9-1", "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -668,13 +710,20 @@ "locked": "3.0.0", "transitive": [ "org.apache.hadoop:hadoop-common", - "org.apache.hive:hive-serde" + "org.apache.hive:hive-serde", + "org.apache.twill:twill-api", + "org.apache.twill:twill-common", + "org.apache.twill:twill-zookeeper" ] }, "com.google.code.gson:gson": { "locked": "2.2.4", "transitive": [ - "org.apache.hadoop:hadoop-common" + "co.cask.tephra:tephra-core", + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-exec", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" ] }, "com.google.errorprone:error_prone_annotations": { @@ -683,9 +732,26 @@ "com.github.ben-manes.caffeine:caffeine" ] }, + "com.google.inject.extensions:guice-assistedinject": { + "locked": "3.0", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "com.google.inject.extensions:guice-servlet": { + "locked": "3.0", + "transitive": [ + "com.sun.jersey.contribs:jersey-guice", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hadoop:hadoop-yarn-common" + ] + }, "com.google.inject:guice": { "locked": "3.0", "transitive": [ + "co.cask.tephra:tephra-core", + "com.google.inject.extensions:guice-assistedinject", + "com.google.inject.extensions:guice-servlet", "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager" @@ -705,6 +771,10 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", + "org.apache.hive:hive-metastore", "org.apache.orc:orc-core" ] }, @@ -720,6 +790,12 @@ "org.apache.hadoop:hadoop-common" ] }, + "com.jolbox:bonecp": { + "locked": "0.8.0.RELEASE", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "com.sun.jersey.contribs:jersey-guice": { "locked": "1.9", "transitive": [ @@ -773,6 +849,12 @@ "org.apache.hive:hive-common" ] }, + "com.zaxxer:HikariCP": { + "locked": "2.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "commons-beanutils:commons-beanutils": { "locked": "1.7.0", "transitive": [ @@ -793,6 +875,7 @@ "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hive:hive-common", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-service-rpc" ] }, @@ -806,6 +889,9 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hive:hive-exec", "org.apache.hive:hive-serde", "org.apache.hive:hive-service-rpc", "org.apache.httpcomponents:httpclient" @@ -815,7 +901,8 @@ "locked": "3.2.2", "transitive": [ "commons-configuration:commons-configuration", - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-common" ] }, "commons-configuration:commons-configuration": { @@ -824,6 +911,12 @@ "org.apache.hadoop:hadoop-common" ] }, + "commons-dbcp:commons-dbcp": { + "locked": "1.4", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "commons-digester:commons-digester": { "locked": "1.8", "transitive": [ @@ -839,7 
+932,8 @@ "commons-httpclient:commons-httpclient": { "locked": "3.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-exec" ] }, "commons-io:commons-io": { @@ -847,7 +941,10 @@ "transitive": [ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hive:hive-exec" ] }, "commons-lang:commons-lang": { @@ -860,11 +957,16 @@ "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", "org.apache.hive.shims:hive-shims-common", "org.apache.hive:hive-common", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", "org.apache.hive:hive-storage-api", - "org.apache.orc:orc-core" + "org.apache.hive:hive-vector-code-gen", + "org.apache.orc:orc-core", + "org.apache.velocity:velocity" ] }, "commons-logging:commons-logging": { @@ -884,6 +986,9 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", "org.apache.httpcomponents:httpclient" ] }, @@ -896,6 +1001,8 @@ "commons-pool:commons-pool": { "locked": "1.6", "transitive": [ + "commons-dbcp:commons-dbcp", + "org.apache.hive:hive-metastore", "org.apache.parquet:parquet-hadoop" ] }, @@ -908,6 +1015,7 @@ "io.dropwizard.metrics:metrics-core": { "locked": "3.1.2", "transitive": [ + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", @@ -930,13 +1038,21 @@ "locked": "3.7.0.Final", "transitive": [ "org.apache.hadoop:hadoop-hdfs", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.zookeeper:zookeeper" ] }, "io.netty:netty-all": { "locked": "4.0.23.Final", "transitive": [ - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client" + ] + }, + "it.unimi.dsi:fastutil": { + "locked": "6.5.6", + "transitive": [ + "co.cask.tephra:tephra-core" ] }, "javax.activation:activation": { @@ -959,6 +1075,12 @@ "com.sun.jersey.contribs:jersey-guice" ] }, + "javax.jdo:jdo-api": { + "locked": "3.0.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "javax.mail:mail": { "locked": "1.4.1", "transitive": [ @@ -981,6 +1103,18 @@ "tomcat:jasper-runtime" ] }, + "javax.transaction:jta": { + "locked": "1.1", + "transitive": [ + "javax.jdo:jdo-api" + ] + }, + "javax.transaction:transaction-api": { + "locked": "1.1", + "transitive": [ + "org.datanucleus:javax.jdo" + ] + }, "javax.xml.bind:jaxb-api": { "locked": "2.2.11", "transitive": [ @@ -990,6 +1124,12 @@ "org.apache.orc:orc-core" ] }, + "javolution:javolution": { + "locked": "5.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "jline:jline": { "locked": "2.12", "transitive": [ @@ -1003,6 +1143,15 @@ "org.apache.hive:hive-common" ] }, + "junit:junit": { + "locked": "4.11", + "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol" + ] + }, "log4j:log4j": { "locked": "1.2.17", "transitive": [ @@ -1010,6 +1159,9 @@ "org.apache.hadoop:hadoop-hdfs", 
"org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", "org.apache.zookeeper:zookeeper" ] }, @@ -1025,10 +1177,26 @@ "org.apache.hive:hive-serde" ] }, + "org.antlr:ST4": { + "locked": "4.0.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.antlr:antlr-runtime": { + "locked": "3.5.2", + "transitive": [ + "org.antlr:ST4", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, "org.apache.ant:ant": { "locked": "1.9.1", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-vector-code-gen" ] }, "org.apache.ant:ant-launcher": { @@ -1041,6 +1209,7 @@ "locked": "1.9.2", "transitive": [ "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hive:hive-serde", "org.apache.iceberg:iceberg-core" ] @@ -1051,7 +1220,8 @@ "org.apache.avro:avro", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec" ] }, "org.apache.commons:commons-lang3": { @@ -1066,6 +1236,12 @@ "org.apache.hadoop:hadoop-common" ] }, + "org.apache.curator:apache-curator": { + "locked": "2.7.1", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.curator:curator-client": { "locked": "2.7.1", "transitive": [ @@ -1078,7 +1254,8 @@ "transitive": [ "org.apache.curator:curator-recipes", "org.apache.hadoop:hadoop-auth", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec" ] }, "org.apache.curator:curator-recipes": { @@ -1087,6 +1264,12 @@ "org.apache.hadoop:hadoop-common" ] }, + "org.apache.derby:derby": { + "locked": "10.10.2.0", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.apache.directory.api:api-asn1-api": { "locked": "1.0.0-M20", "transitive": [ @@ -1133,13 +1316,17 @@ "locked": "2.7.3", "transitive": [ "org.apache.hadoop:hadoop-client", - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hadoop:hadoop-yarn-api", + "org.apache.hadoop:hadoop-yarn-common" ] }, "org.apache.hadoop:hadoop-auth": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-client" ] }, "org.apache.hadoop:hadoop-client": { @@ -1149,7 +1336,9 @@ "locked": "2.7.3", "transitive": [ "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common" ] }, "org.apache.hadoop:hadoop-hdfs": { @@ -1176,7 +1365,9 @@ "locked": "2.7.3", "transitive": [ "org.apache.hadoop:hadoop-client", - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common" ] }, "org.apache.hadoop:hadoop-mapreduce-client-jobclient": { @@ -1232,6 +1423,33 @@ "org.apache.hadoop:hadoop-mapreduce-client-shuffle" ] }, + "org.apache.hbase:hbase-annotations": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol" + ] + }, + "org.apache.hbase:hbase-client": { + "locked": "1.1.1", + "transitive": [ 
+ "org.apache.hive:hive-metastore" + ] + }, + "org.apache.hbase:hbase-common": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client" + ] + }, + "org.apache.hbase:hbase-protocol": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common" + ] + }, "org.apache.hive.shims:hive-shims-common": { "locked": "2.3.7", "transitive": [ @@ -1244,9 +1462,18 @@ "org.apache.hive:hive-serde" ] }, - "org.apache.hive:hive-serde": { + "org.apache.hive:hive-exec": { + "locked": "2.3.7" + }, + "org.apache.hive:hive-metastore": { "locked": "2.3.7" }, + "org.apache.hive:hive-serde": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.apache.hive:hive-service-rpc": { "locked": "2.3.7", "transitive": [ @@ -1257,6 +1484,8 @@ "locked": "2.3.7", "transitive": [ "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde" ] }, @@ -1266,11 +1495,19 @@ "org.apache.hive:hive-common" ] }, + "org.apache.hive:hive-vector-code-gen": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.htrace:htrace-core": { "locked": "3.1.0-incubating", "transitive": [ "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common" ] }, "org.apache.httpcomponents:httpclient": { @@ -1328,6 +1565,12 @@ "org.apache.iceberg:iceberg-parquet": { "project": true }, + "org.apache.ivy:ivy": { + "locked": "2.4.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.logging.log4j:log4j-1.2-api": { "locked": "2.6.2", "transitive": [ @@ -1431,18 +1674,72 @@ "org.apache.thrift:libfb303": { "locked": "0.9.3", "transitive": [ + "org.apache.hive:hive-metastore", "org.apache.hive:hive-service-rpc" ] }, "org.apache.thrift:libthrift": { "locked": "0.9.3", "transitive": [ + "co.cask.tephra:tephra-core", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", "org.apache.hive:hive-service-rpc", "org.apache.thrift:libfb303" ] }, + "org.apache.twill:twill-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-common": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-api", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "org.apache.twill:twill-discovery-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.twill:twill-discovery-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core" + ] + }, + "org.apache.twill:twill-zookeeper": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.velocity:velocity": { + "locked": "1.5", + "transitive": [ + "org.apache.hive:hive-vector-code-gen" + ] + }, "org.apache.yetus:audience-annotations": { "locked": "0.11.0", "transitive": [ @@ -1452,13 +1749,17 @@ "org.apache.zookeeper:zookeeper": { 
"locked": "3.4.6", "transitive": [ + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", "org.apache.hadoop:hadoop-auth", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-server-common", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.twill:twill-zookeeper" ] }, "org.checkerframework:checker-qual": { @@ -1467,6 +1768,12 @@ "com.github.ben-manes.caffeine:caffeine" ] }, + "org.codehaus.groovy:groovy-all": { + "locked": "2.4.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.codehaus.jackson:jackson-core-asl": { "locked": "1.9.13", "transitive": [ @@ -1493,6 +1800,7 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-client", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-xc" ] @@ -1511,6 +1819,31 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager" ] }, + "org.datanucleus:datanucleus-api-jdo": { + "locked": "4.2.4", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-core": { + "locked": "4.1.17", + "transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-rdbms": { + "locked": "4.1.19", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:javax.jdo": { + "locked": "3.2.0-m3", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.eclipse.jetty.aggregate:jetty-all": { "locked": "7.6.0.v20120127", "transitive": [ @@ -1532,16 +1865,45 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager" ] }, + "org.hamcrest:hamcrest-core": { + "locked": "1.3", + "transitive": [ + "junit:junit" + ] + }, "org.jetbrains:annotations": { "locked": "17.0.0", "transitive": [ "org.apache.orc:orc-core" ] }, + "org.jruby.jcodings:jcodings": { + "locked": "1.0.8", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.jruby.joni:joni" + ] + }, + "org.jruby.joni:joni": { + "locked": "2.1.2", + "transitive": [ + "org.apache.hbase:hbase-client" + ] + }, + "org.ow2.asm:asm-all": { + "locked": "5.0.2", + "transitive": [ + "org.apache.twill:twill-core" + ] + }, "org.slf4j:slf4j-api": { "locked": "1.7.25", "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", + "com.jolbox:bonecp", + "com.zaxxer:HikariCP", "io.dropwizard.metrics:metrics-core", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", @@ -1562,10 +1924,13 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hive.shims:hive-shims-common", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", "org.apache.hive:hive-service-rpc", "org.apache.hive:hive-shims", "org.apache.hive:hive-storage-api", + "org.apache.hive:hive-vector-code-gen", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -1578,6 +1943,9 @@ "org.apache.parquet:parquet-common", "org.apache.parquet:parquet-format-structures", "org.apache.thrift:libthrift", + "org.apache.twill:twill-common", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", "org.apache.zookeeper:zookeeper" ] }, @@ -1599,6 +1967,18 @@ 
"org.apache.parquet:parquet-hadoop" ] }, + "oro:oro": { + "locked": "2.0.8", + "transitive": [ + "org.apache.velocity:velocity" + ] + }, + "stax:stax-api": { + "locked": "1.0.1", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "tomcat:jasper-compiler": { "locked": "5.5.23", "transitive": [ @@ -1664,6 +2044,44 @@ "asm:asm-commons" ] }, + "ch.qos.logback:logback-classic": { + "locked": "1.0.9", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "ch.qos.logback:logback-core": { + "locked": "1.0.9", + "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "co.cask.tephra:tephra-api": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-core", + "co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-core": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-hbase-compat-1.0": { + "locked": "0.6.0", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "com.fasterxml.jackson.core:jackson-annotations": { "locked": "2.6.0", "transitive": [ @@ -1689,22 +2107,40 @@ "org.apache.hive:hive-common" ] }, + "com.github.stephenc.findbugs:findbugs-annotations": { + "locked": "1.3.9-1", + "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol" + ] + }, "com.google.code.findbugs:jsr305": { "locked": "3.0.0", "transitive": [ "org.apache.hadoop:hadoop-common", - "org.apache.hive:hive-serde" + "org.apache.hive:hive-serde", + "org.apache.twill:twill-api", + "org.apache.twill:twill-common", + "org.apache.twill:twill-zookeeper" ] }, "com.google.code.gson:gson": { "locked": "2.2.4", "transitive": [ - "org.apache.hadoop:hadoop-common" + "co.cask.tephra:tephra-core", + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-exec", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" ] }, "com.google.guava:guava": { "locked": "16.0.1", "transitive": [ + "co.cask.tephra:tephra-core", + "com.jolbox:bonecp", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", @@ -1718,13 +2154,25 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", "org.apache.hadoop:hadoop-yarn-server-web-proxy", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-metastore", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "com.google.inject.extensions:guice-assistedinject": { + "locked": "3.0", + "transitive": [ + "co.cask.tephra:tephra-core" ] }, "com.google.inject.extensions:guice-servlet": { "locked": "3.0", "transitive": [ "com.sun.jersey.contribs:jersey-guice", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-resourcemanager" @@ -1733,6 +2181,8 @@ "com.google.inject:guice": { "locked": "3.0", "transitive": [ + "co.cask.tephra:tephra-core", + "com.google.inject.extensions:guice-assistedinject", 
"com.google.inject.extensions:guice-servlet", "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-yarn-common", @@ -1757,6 +2207,10 @@ "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", + "org.apache.hive:hive-metastore", "org.apache.orc:orc-core" ] }, @@ -1772,6 +2226,12 @@ "org.apache.hadoop:hadoop-common" ] }, + "com.jolbox:bonecp": { + "locked": "0.8.0.RELEASE", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "com.sun.jersey.contribs:jersey-guice": { "locked": "1.9", "transitive": [ @@ -1839,6 +2299,12 @@ "org.apache.avro:avro" ] }, + "com.zaxxer:HikariCP": { + "locked": "2.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "commons-beanutils:commons-beanutils": { "locked": "1.7.0", "transitive": [ @@ -1859,6 +2325,7 @@ "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hive:hive-common", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-service-rpc" ] }, @@ -1872,6 +2339,9 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hive:hive-exec", "org.apache.hive:hive-serde", "org.apache.hive:hive-service-rpc", "org.apache.httpcomponents:httpclient" @@ -1882,7 +2352,8 @@ "transitive": [ "commons-configuration:commons-configuration", "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice" + "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", + "org.apache.hbase:hbase-common" ] }, "commons-configuration:commons-configuration": { @@ -1891,6 +2362,12 @@ "org.apache.hadoop:hadoop-common" ] }, + "commons-dbcp:commons-dbcp": { + "locked": "1.4", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "commons-digester:commons-digester": { "locked": "1.8", "transitive": [ @@ -1906,7 +2383,8 @@ "commons-httpclient:commons-httpclient": { "locked": "3.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-exec" ] }, "commons-io:commons-io": { @@ -1915,7 +2393,10 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hive:hive-exec" ] }, "commons-lang:commons-lang": { @@ -1929,12 +2410,17 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", "org.apache.hive.shims:hive-shims-0.23", "org.apache.hive.shims:hive-shims-common", "org.apache.hive:hive-common", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", "org.apache.hive:hive-storage-api", - "org.apache.orc:orc-core" + "org.apache.hive:hive-vector-code-gen", + "org.apache.orc:orc-core", + "org.apache.velocity:velocity" ] }, "commons-logging:commons-logging": { @@ -1957,6 +2443,9 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", "org.apache.hadoop:hadoop-yarn-server-web-proxy", + 
"org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", "org.apache.httpcomponents:httpclient" ] }, @@ -1966,6 +2455,13 @@ "org.apache.hadoop:hadoop-common" ] }, + "commons-pool:commons-pool": { + "locked": "1.5.4", + "transitive": [ + "commons-dbcp:commons-dbcp", + "org.apache.hive:hive-metastore" + ] + }, "io.airlift:aircompressor": { "locked": "0.8", "transitive": [ @@ -1981,6 +2477,7 @@ "io.dropwizard.metrics:metrics-core": { "locked": "3.1.2", "transitive": [ + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", @@ -2003,13 +2500,21 @@ "locked": "3.7.0.Final", "transitive": [ "org.apache.hadoop:hadoop-hdfs", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.zookeeper:zookeeper" ] }, "io.netty:netty-all": { "locked": "4.0.23.Final", "transitive": [ - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client" + ] + }, + "it.unimi.dsi:fastutil": { + "locked": "6.5.6", + "transitive": [ + "co.cask.tephra:tephra-core" ] }, "javax.activation:activation": { @@ -2027,7 +2532,13 @@ "com.sun.jersey.contribs:jersey-guice" ] }, - "javax.mail:mail": { + "javax.jdo:jdo-api": { + "locked": "3.0.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "javax.mail:mail": { "locked": "1.4.1", "transitive": [ "org.eclipse.jetty.aggregate:jetty-all" @@ -2055,6 +2566,18 @@ "tomcat:jasper-runtime" ] }, + "javax.transaction:jta": { + "locked": "1.1", + "transitive": [ + "javax.jdo:jdo-api" + ] + }, + "javax.transaction:transaction-api": { + "locked": "1.1", + "transitive": [ + "org.datanucleus:javax.jdo" + ] + }, "javax.xml.bind:jaxb-api": { "locked": "2.2.2", "transitive": [ @@ -2071,6 +2594,12 @@ "javax.xml.bind:jaxb-api" ] }, + "javolution:javolution": { + "locked": "5.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "jline:jline": { "locked": "2.12", "transitive": [ @@ -2084,6 +2613,15 @@ "org.apache.hive:hive-common" ] }, + "junit:junit": { + "locked": "4.11", + "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol" + ] + }, "log4j:log4j": { "locked": "1.2.17", "transitive": [ @@ -2093,6 +2631,9 @@ "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", "org.apache.zookeeper:zookeeper" ] }, @@ -2108,10 +2649,26 @@ "org.apache.hive:hive-serde" ] }, + "org.antlr:ST4": { + "locked": "4.0.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.antlr:antlr-runtime": { + "locked": "3.5.2", + "transitive": [ + "org.antlr:ST4", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, "org.apache.ant:ant": { "locked": "1.9.1", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-vector-code-gen" ] }, "org.apache.ant:ant-launcher": { @@ -2124,6 +2681,7 @@ "locked": "1.7.7", "transitive": [ "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hive:hive-serde" ] }, @@ -2133,7 +2691,8 @@ "org.apache.avro:avro", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-common", - 
"org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec" ] }, "org.apache.commons:commons-lang3": { @@ -2148,6 +2707,12 @@ "org.apache.hadoop:hadoop-common" ] }, + "org.apache.curator:apache-curator": { + "locked": "2.7.1", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.curator:curator-client": { "locked": "2.7.1", "transitive": [ @@ -2160,7 +2725,8 @@ "transitive": [ "org.apache.curator:curator-recipes", "org.apache.hadoop:hadoop-auth", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec" ] }, "org.apache.curator:curator-recipes": { @@ -2169,6 +2735,12 @@ "org.apache.hadoop:hadoop-common" ] }, + "org.apache.derby:derby": { + "locked": "10.10.2.0", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.apache.directory.api:api-asn1-api": { "locked": "1.0.0-M20", "transitive": [ @@ -2216,6 +2788,7 @@ "transitive": [ "org.apache.hadoop:hadoop-client", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-api", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", @@ -2226,7 +2799,8 @@ "org.apache.hadoop:hadoop-auth": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-client" ] }, "org.apache.hadoop:hadoop-client": { @@ -2236,7 +2810,9 @@ "locked": "2.7.3", "transitive": [ "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common" ] }, "org.apache.hadoop:hadoop-hdfs": { @@ -2263,7 +2839,9 @@ "locked": "2.7.3", "transitive": [ "org.apache.hadoop:hadoop-client", - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common" ] }, "org.apache.hadoop:hadoop-mapreduce-client-jobclient": { @@ -2346,6 +2924,33 @@ "org.apache.hadoop:hadoop-yarn-server-resourcemanager" ] }, + "org.apache.hbase:hbase-annotations": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol" + ] + }, + "org.apache.hbase:hbase-client": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.apache.hbase:hbase-common": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client" + ] + }, + "org.apache.hbase:hbase-protocol": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common" + ] + }, "org.apache.hive.shims:hive-shims-0.23": { "locked": "2.3.7", "transitive": [ @@ -2372,9 +2977,18 @@ "org.apache.hive:hive-serde" ] }, - "org.apache.hive:hive-serde": { + "org.apache.hive:hive-exec": { + "locked": "2.3.7" + }, + "org.apache.hive:hive-metastore": { "locked": "2.3.7" }, + "org.apache.hive:hive-serde": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.apache.hive:hive-service-rpc": { "locked": "2.3.7", "transitive": [ @@ -2385,6 +2999,8 @@ "locked": "2.3.7", "transitive": [ "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde" ] }, @@ -2394,11 +3010,19 @@ "org.apache.hive:hive-common" ] }, + "org.apache.hive:hive-vector-code-gen": { + 
"locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.htrace:htrace-core": { "locked": "3.1.0-incubating", "transitive": [ "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common" ] }, "org.apache.httpcomponents:httpclient": { @@ -2417,6 +3041,12 @@ "org.apache.thrift:libthrift" ] }, + "org.apache.ivy:ivy": { + "locked": "2.4.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.logging.log4j:log4j-1.2-api": { "locked": "2.6.2", "transitive": [ @@ -2467,21 +3097,76 @@ "org.apache.thrift:libfb303": { "locked": "0.9.3", "transitive": [ + "org.apache.hive:hive-metastore", "org.apache.hive:hive-service-rpc" ] }, "org.apache.thrift:libthrift": { "locked": "0.9.3", "transitive": [ + "co.cask.tephra:tephra-core", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", "org.apache.hive:hive-service-rpc", "org.apache.thrift:libfb303" ] }, + "org.apache.twill:twill-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-common": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-api", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "org.apache.twill:twill-discovery-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.twill:twill-discovery-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core" + ] + }, + "org.apache.twill:twill-zookeeper": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.velocity:velocity": { + "locked": "1.5", + "transitive": [ + "org.apache.hive:hive-vector-code-gen" + ] + }, "org.apache.zookeeper:zookeeper": { "locked": "3.4.6", "transitive": [ + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", @@ -2489,7 +3174,16 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.codehaus.groovy:groovy-all": { + "locked": "2.4.4", + "transitive": [ + "org.apache.hive:hive-exec" ] }, "org.codehaus.jackson:jackson-core-asl": { @@ -2520,6 +3214,7 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-client", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-xc" ] @@ -2540,6 +3235,31 @@ "org.apache.hadoop:hadoop-yarn-server-resourcemanager" ] }, + "org.datanucleus:datanucleus-api-jdo": { + "locked": "4.2.4", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-core": { + "locked": "4.1.17", + 
"transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-rdbms": { + "locked": "4.1.19", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:javax.jdo": { + "locked": "3.2.0-m3", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.eclipse.jetty.aggregate:jetty-all": { "locked": "7.6.0.v20120127", "transitive": [ @@ -2563,16 +3283,45 @@ "org.apache.hadoop:hadoop-yarn-server-resourcemanager" ] }, + "org.hamcrest:hamcrest-core": { + "locked": "1.3", + "transitive": [ + "junit:junit" + ] + }, + "org.jruby.jcodings:jcodings": { + "locked": "1.0.8", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.jruby.joni:joni" + ] + }, + "org.jruby.joni:joni": { + "locked": "2.1.2", + "transitive": [ + "org.apache.hbase:hbase-client" + ] + }, "org.openjdk.jol:jol-core": { "locked": "0.2", "transitive": [ "io.airlift:slice" ] }, + "org.ow2.asm:asm-all": { + "locked": "5.0.2", + "transitive": [ + "org.apache.twill:twill-core" + ] + }, "org.slf4j:slf4j-api": { "locked": "1.7.21", "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", + "com.jolbox:bonecp", + "com.zaxxer:HikariCP", "io.dropwizard.metrics:metrics-core", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", @@ -2596,13 +3345,19 @@ "org.apache.hive.shims:hive-shims-common", "org.apache.hive.shims:hive-shims-scheduler", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", "org.apache.hive:hive-service-rpc", "org.apache.hive:hive-shims", "org.apache.hive:hive-storage-api", + "org.apache.hive:hive-vector-code-gen", "org.apache.logging.log4j:log4j-slf4j-impl", "org.apache.orc:orc-core", "org.apache.thrift:libthrift", + "org.apache.twill:twill-common", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", "org.apache.zookeeper:zookeeper" ] }, @@ -2618,6 +3373,18 @@ "org.apache.avro:avro" ] }, + "oro:oro": { + "locked": "2.0.8", + "transitive": [ + "org.apache.velocity:velocity" + ] + }, + "stax:stax-api": { + "locked": "1.0.1", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "tomcat:jasper-compiler": { "locked": "5.5.23", "transitive": [ @@ -3648,10 +4415,66 @@ "asm:asm-commons" ] }, + "ch.qos.logback:logback-classic": { + "locked": "1.0.9", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "ch.qos.logback:logback-core": { + "locked": "1.0.9", + "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "co.cask.tephra:tephra-api": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-core", + "co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-core": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-hbase-compat-1.0": { + "locked": "0.6.0", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "com.beust:jcommander": { + "locked": "1.30", + "transitive": [ + "org.apache.slider:slider-core" + ] + }, + "com.esotericsoftware.kryo:kryo": { + "locked": "2.24.0", + "requested": "2.24.0" + }, + "com.esotericsoftware.minlog:minlog": { + "locked": "1.2", + 
"transitive": [ + "com.esotericsoftware.kryo:kryo" + ] + }, "com.fasterxml.jackson.core:jackson-annotations": { "locked": "2.10.2", + "requested": "2.6.5", "transitive": [ - "com.fasterxml.jackson.core:jackson-databind" + "com.fasterxml.jackson.core:jackson-databind", + "org.apache.calcite.avatica:avatica" ] }, "com.fasterxml.jackson.core:jackson-core": { @@ -3659,6 +4482,7 @@ "transitive": [ "com.fasterxml.jackson.core:jackson-databind", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", "org.apache.iceberg:iceberg-core" ] }, @@ -3667,6 +4491,7 @@ "transitive": [ "io.dropwizard.metrics:metrics-json", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", "org.apache.hive:hive-common", "org.apache.iceberg:iceberg-core" ] @@ -3686,6 +4511,15 @@ "com.github.stephenc.findbugs:findbugs-annotations": { "locked": "1.3.9-1", "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -3697,14 +4531,26 @@ "com.google.code.findbugs:jsr305": { "locked": "3.0.0", "transitive": [ + "org.apache.calcite:calcite-core", "org.apache.hadoop:hadoop-common", - "org.apache.hive:hive-serde" + "org.apache.hive:hive-serde", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-internals", + "org.apache.twill:twill-api", + "org.apache.twill:twill-common", + "org.apache.twill:twill-zookeeper" ] }, "com.google.code.gson:gson": { "locked": "2.2.4", "transitive": [ - "org.apache.hadoop:hadoop-common" + "co.cask.tephra:tephra-core", + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" ] }, "com.google.errorprone:error_prone_annotations": { @@ -3714,8 +4560,14 @@ ] }, "com.google.guava:guava": { - "locked": "16.0.1", - "transitive": [ + "locked": "18.0", + "transitive": [ + "co.cask.tephra:tephra-core", + "com.jolbox:bonecp", + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid", + "org.apache.calcite:calcite-linq4j", + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", @@ -3729,21 +4581,51 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", "org.apache.hadoop:hadoop-yarn-server-web-proxy", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-vector-code-gen", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", + "org.reflections:reflections" + ] + 
}, + "com.google.inject.extensions:guice-assistedinject": { + "locked": "3.0", + "transitive": [ + "co.cask.tephra:tephra-core" ] }, "com.google.inject.extensions:guice-servlet": { "locked": "3.0", "transitive": [ "com.sun.jersey.contribs:jersey-guice", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.slider:slider-core" ] }, "com.google.inject:guice": { "locked": "3.0", "transitive": [ + "co.cask.tephra:tephra-core", + "com.google.inject.extensions:guice-assistedinject", "com.google.inject.extensions:guice-servlet", "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-yarn-common", @@ -3753,8 +4635,9 @@ ] }, "com.google.protobuf:protobuf-java": { - "locked": "2.5.0", + "locked": "3.0.0-beta-1", "transitive": [ + "org.apache.calcite.avatica:avatica", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-mapreduce-client-app", @@ -3768,7 +4651,19 @@ "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.orc:orc-core" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-metastore", + "org.apache.orc:orc-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "com.jamesmurty.utils:java-xmlbuilder": { @@ -3783,6 +4678,28 @@ "org.apache.hadoop:hadoop-common" ] }, + "com.jolbox:bonecp": { + "locked": "0.8.0.RELEASE", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "com.klarna:hiverunner": { + "locked": "5.2.1", + "requested": "5.2.1" + }, + "com.lmax:disruptor": { + "locked": "3.3.0", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "com.ning:async-http-client": { + "locked": "1.8.16", + "transitive": [ + "org.apache.tez:tez-runtime-library" + ] + }, "com.sun.jersey.contribs:jersey-guice": { "locked": "1.9", "transitive": [ @@ -3798,7 +4715,9 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "com.sun.jersey:jersey-core": { @@ -3808,10 +4727,12 @@ "com.sun.jersey:jersey-json", "com.sun.jersey:jersey-server", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-server" ] }, "com.sun.jersey:jersey-json": { @@ -3821,7 +4742,9 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - 
"org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "com.sun.jersey:jersey-server": { @@ -3829,7 +4752,10 @@ "transitive": [ "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" ] }, "com.sun.xml.bind:jaxb-impl": { @@ -3841,7 +4767,21 @@ "com.tdunning:json": { "locked": "1.8", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server" + ] + }, + "com.yammer.metrics:metrics-core": { + "locked": "2.2.0", + "transitive": [ + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server" + ] + }, + "com.zaxxer:HikariCP": { + "locked": "2.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" ] }, "commons-beanutils:commons-beanutils": { @@ -3863,8 +4803,13 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", - "org.apache.hive:hive-service-rpc" + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-service", + "org.apache.hive:hive-service-rpc", + "org.apache.tez:tez-dag" ] }, "commons-codec:commons-codec": { @@ -3877,9 +4822,17 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-llap-server", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", - "org.apache.httpcomponents:httpclient" + "org.apache.httpcomponents:httpclient", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-library" ] }, "commons-collections:commons-collections": { @@ -3887,7 +4840,10 @@ "transitive": [ "commons-configuration:commons-configuration", "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice" + "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.tez:tez-mapreduce" ] }, "commons-configuration:commons-configuration": { @@ -3896,6 +4852,19 @@ "org.apache.hadoop:hadoop-common" ] }, + "commons-daemon:commons-daemon": { + "locked": "1.0.13", + "transitive": [ + "org.apache.hadoop:hadoop-hdfs" + ] + }, + "commons-dbcp:commons-dbcp": { + "locked": "1.4", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.apache.hive:hive-metastore" + ] + }, "commons-digester:commons-digester": { "locked": "1.8", "transitive": [ @@ -3911,7 +4880,10 @@ "commons-httpclient:commons-httpclient": { "locked": "3.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core" ] }, "commons-io:commons-io": { @@ -3920,7 +4892,13 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", 
+ "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "commons-lang:commons-lang": { @@ -3934,12 +4912,27 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server", "org.apache.hive.shims:hive-shims-0.23", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-storage-api", - "org.apache.orc:orc-core" + "org.apache.hive:hive-vector-code-gen", + "org.apache.orc:orc-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-library", + "org.apache.velocity:velocity" ] }, "commons-logging:commons-logging": { @@ -3952,6 +4945,7 @@ "commons-el:commons-el", "commons-httpclient:commons-httpclient", "net.java.dev.jets3t:jets3t", + "net.sf.jpam:jpam", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-api", @@ -3962,7 +4956,16 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", "org.apache.hadoop:hadoop-yarn-server-web-proxy", - "org.apache.httpcomponents:httpclient" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.httpcomponents:httpclient", + "org.apache.slider:slider-core" ] }, "commons-net:commons-net": { @@ -3974,9 +4977,17 @@ "commons-pool:commons-pool": { "locked": "1.6", "transitive": [ + "commons-dbcp:commons-dbcp", + "org.apache.hive:hive-metastore", "org.apache.parquet:parquet-hadoop" ] }, + "dom4j:dom4j": { + "locked": "1.6.1", + "transitive": [ + "org.reflections:reflections" + ] + }, "io.airlift:aircompressor": { "locked": "0.15", "transitive": [ @@ -3986,6 +4997,7 @@ "io.dropwizard.metrics:metrics-core": { "locked": "3.1.2", "transitive": [ + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", @@ -4005,16 +5017,41 @@ ] }, "io.netty:netty": { - "locked": "3.7.0.Final", + "locked": "3.9.2.Final", "transitive": [ + "com.ning:async-http-client", "org.apache.hadoop:hadoop-hdfs", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hive:hive-llap-server", "org.apache.zookeeper:zookeeper" ] }, "io.netty:netty-all": { "locked": "4.0.23.Final", "transitive": [ - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-server" + ] + }, + "it.unimi.dsi:fastutil": { + "locked": "6.5.6", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "jakarta.jms:jakarta.jms-api": { + "locked": "2.0.2", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions" + ] + }, + "javassist:javassist": { + 
"locked": "3.12.1.GA", + "transitive": [ + "org.reflections:reflections" ] }, "javax.activation:activation": { @@ -4037,6 +5074,12 @@ "com.sun.jersey.contribs:jersey-guice" ] }, + "javax.jdo:jdo-api": { + "locked": "3.0.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "javax.mail:mail": { "locked": "1.4.1", "transitive": [ @@ -4046,7 +5089,8 @@ "javax.servlet.jsp:jsp-api": { "locked": "2.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.slider:slider-core" ] }, "javax.servlet:jsp-api": { @@ -4060,13 +5104,30 @@ "transitive": [ "javax.servlet:jsp-api", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.slider:slider-core", + "org.apache.tez:tez-dag", + "org.eclipse.jetty.aggregate:jetty-all", "tomcat:jasper-runtime" ] }, - "javax.xml.bind:jaxb-api": { - "locked": "2.2.11", + "javax.transaction:jta": { + "locked": "1.1", + "transitive": [ + "javax.jdo:jdo-api" + ] + }, + "javax.transaction:transaction-api": { + "locked": "1.1", + "transitive": [ + "org.datanucleus:javax.jdo" + ] + }, + "javax.xml.bind:jaxb-api": { + "locked": "2.2.11", "transitive": [ "com.sun.xml.bind:jaxb-impl", "org.apache.hadoop:hadoop-yarn-common", @@ -4076,9 +5137,16 @@ "org.apache.orc:orc-core" ] }, + "javolution:javolution": { + "locked": "5.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "jline:jline": { "locked": "2.12", "transitive": [ + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", "org.apache.zookeeper:zookeeper" ] @@ -4086,11 +5154,23 @@ "joda-time:joda-time": { "locked": "2.8.1", "transitive": [ + "org.apache.calcite:calcite-druid", "org.apache.hive:hive-common" ] }, "junit:junit": { - "locked": "4.12" + "locked": "4.12", + "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server" + ] }, "log4j:log4j": { "locked": "1.2.17", @@ -4101,25 +5181,62 @@ "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core", "org.apache.zookeeper:zookeeper" ] }, + "net.hydromatic:eigenbase-properties": { + "locked": "1.1.5", + "transitive": [ + "org.apache.calcite:calcite-core" + ] + }, "net.java.dev.jets3t:jets3t": { "locked": "0.9.0", "transitive": [ "org.apache.hadoop:hadoop-common" ] }, + "net.sf.jpam:jpam": { + "locked": "1.1", + "transitive": [ + "org.apache.hive:hive-service" + ] + }, "net.sf.opencsv:opencsv": { "locked": "2.3", "transitive": [ "org.apache.hive:hive-serde" ] }, + "org.antlr:ST4": { + "locked": "4.0.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.antlr:antlr-runtime": { + "locked": "3.5.2", + "transitive": [ + "org.antlr:ST4", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + 
}, "org.apache.ant:ant": { "locked": "1.9.1", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-vector-code-gen" ] }, "org.apache.ant:ant-launcher": { @@ -4132,8 +5249,51 @@ "locked": "1.9.2", "transitive": [ "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hive:hive-llap-server", "org.apache.hive:hive-serde", - "org.apache.iceberg:iceberg-core" + "org.apache.iceberg:iceberg-core", + "org.apache.slider:slider-core" + ] + }, + "org.apache.calcite.avatica:avatica": { + "locked": "1.8.0", + "transitive": [ + "org.apache.calcite:calcite-core" + ] + }, + "org.apache.calcite.avatica:avatica-metrics": { + "locked": "1.8.0", + "transitive": [ + "org.apache.calcite.avatica:avatica" + ] + }, + "org.apache.calcite:calcite-core": { + "locked": "1.10.0", + "transitive": [ + "org.apache.calcite:calcite-druid", + "org.apache.hive:hive-exec" + ] + }, + "org.apache.calcite:calcite-druid": { + "locked": "1.10.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.apache.calcite:calcite-linq4j": { + "locked": "1.10.0", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid" + ] + }, + "org.apache.commons:commons-collections4": { + "locked": "4.1", + "transitive": [ + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag" ] }, "org.apache.commons:commons-compress": { @@ -4142,26 +5302,49 @@ "org.apache.avro:avro", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core" ] }, "org.apache.commons:commons-lang3": { - "locked": "3.1", + "locked": "3.2", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.calcite:calcite-core", + "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-tez" + ] + }, + "org.apache.commons:commons-math": { + "locked": "2.2", + "transitive": [ + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server" ] }, "org.apache.commons:commons-math3": { "locked": "3.1.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.tez:tez-dag" + ] + }, + "org.apache.curator:apache-curator": { + "locked": "2.7.1", + "transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-llap-client" ] }, "org.apache.curator:curator-client": { "locked": "2.7.1", "transitive": [ "org.apache.curator:curator-framework", - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.slider:slider-core" ] }, "org.apache.curator:curator-framework": { @@ -4169,13 +5352,27 @@ "transitive": [ "org.apache.curator:curator-recipes", "org.apache.hadoop:hadoop-auth", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-service", + "org.apache.slider:slider-core" ] }, "org.apache.curator:curator-recipes": { "locked": "2.7.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-service", + "org.apache.slider:slider-core" + ] + }, + "org.apache.derby:derby": { + 
"locked": "10.10.2.0", + "transitive": [ + "org.apache.hive:hive-metastore" ] }, "org.apache.directory.api:api-asn1-api": { @@ -4225,33 +5422,77 @@ "transitive": [ "org.apache.hadoop:hadoop-client", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-api", + "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-common", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.hadoop:hadoop-archives": { + "locked": "2.7.2", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core" ] }, "org.apache.hadoop:hadoop-auth": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.tez:tez-api", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-client": { - "locked": "2.7.3" + "locked": "2.7.3", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" + ] }, "org.apache.hadoop:hadoop-common": { "locked": "2.7.3", "transitive": [ "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-hdfs": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "org.apache.hadoop:hadoop-mapreduce-client-app": { @@ -4265,14 +5506,22 @@ "transitive": [ "org.apache.hadoop:hadoop-mapreduce-client-app", "org.apache.hadoop:hadoop-mapreduce-client-jobclient", - "org.apache.hadoop:hadoop-mapreduce-client-shuffle" + "org.apache.hadoop:hadoop-mapreduce-client-shuffle", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-mapreduce-client-core": { "locked": "2.7.3", "transitive": [ "org.apache.hadoop:hadoop-client", - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-webhcat-java-client", + "org.apache.tez:tez-mapreduce" ] }, 
"org.apache.hadoop:hadoop-mapreduce-client-jobclient": { @@ -4294,17 +5543,28 @@ "org.apache.hadoop:hadoop-client", "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.hadoop:hadoop-yarn-server-web-proxy" + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-yarn-client": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-yarn-common": { @@ -4313,11 +5573,24 @@ "org.apache.hadoop:hadoop-mapreduce-client-common", "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-client", + "org.apache.hadoop:hadoop-yarn-registry", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.hadoop:hadoop-yarn-server-web-proxy" + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.hadoop:hadoop-yarn-registry": { + "locked": "2.7.1", + "transitive": [ + "org.apache.slider:slider-core" ] }, "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice": { @@ -4352,7 +5625,98 @@ "org.apache.hadoop:hadoop-yarn-server-web-proxy": { "locked": "2.7.2", "transitive": [ - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.tez:tez-dag" + ] + }, + "org.apache.hbase:hbase-annotations": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-protocol" + ] + }, + "org.apache.hbase:hbase-client": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore" + ] + }, + "org.apache.hbase:hbase-common": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-hadoop-compat": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-hadoop2-compat": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-prefix-tree": { + "locked": "1.1.1", + "transitive": 
[ + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-procedure": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-protocol": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-server": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive.hcatalog:hive-hcatalog-core": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client" + ] + }, + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-webhcat-java-client" + ] + }, + "org.apache.hive.hcatalog:hive-webhcat-java-client": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner" ] }, "org.apache.hive.shims:hive-shims-0.23": { @@ -4375,25 +5739,105 @@ "org.apache.hive:hive-shims" ] }, + "org.apache.hive:hive-cli": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core" + ] + }, "org.apache.hive:hive-common": { "locked": "2.3.7", "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez", "org.apache.hive:hive-serde" ] }, - "org.apache.hive:hive-serde": { + "org.apache.hive:hive-exec": { "locked": "2.3.7" }, + "org.apache.hive:hive-jdbc": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.hive:hive-llap-client": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez" + ] + }, + "org.apache.hive:hive-llap-common": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive:hive-llap-server": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-service" + ] + }, + "org.apache.hive:hive-llap-tez": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive:hive-metastore": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-service" + ] + }, + "org.apache.hive:hive-serde": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore" + ] + }, + "org.apache.hive:hive-service": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc" + ] + }, "org.apache.hive:hive-service-rpc": { "locked": "2.3.7", "transitive": [ - "org.apache.hive:hive-serde" + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-serde", + "org.apache.hive:hive-service" ] }, "org.apache.hive:hive-shims": { "locked": "2.3.7", "transitive": [ + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", 
"org.apache.hive:hive-serde" ] }, @@ -4403,26 +5847,41 @@ "org.apache.hive:hive-common" ] }, + "org.apache.hive:hive-vector-code-gen": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.htrace:htrace-core": { "locked": "3.1.0-incubating", "transitive": [ "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server" ] }, "org.apache.httpcomponents:httpclient": { - "locked": "4.4.1", + "locked": "4.5.2", "transitive": [ "net.java.dev.jets3t:jets3t", + "org.apache.calcite.avatica:avatica", "org.apache.hadoop:hadoop-auth", + "org.apache.hive:hive-jdbc", + "org.apache.slider:slider-core", "org.apache.thrift:libthrift" ] }, "org.apache.httpcomponents:httpcore": { - "locked": "4.4.1", + "locked": "4.4.4", "transitive": [ "net.java.dev.jets3t:jets3t", + "org.apache.calcite.avatica:avatica", + "org.apache.hive:hive-jdbc", "org.apache.httpcomponents:httpclient", + "org.apache.slider:slider-core", "org.apache.thrift:libthrift" ] }, @@ -4465,6 +5924,12 @@ "org.apache.iceberg:iceberg-parquet": { "project": true }, + "org.apache.ivy:ivy": { + "locked": "2.4.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.logging.log4j:log4j-1.2-api": { "locked": "2.6.2", "transitive": [ @@ -4504,6 +5969,7 @@ "locked": "1.6.3", "transitive": [ "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server", "org.apache.iceberg:iceberg-orc" ] }, @@ -4565,21 +6031,139 @@ "org.apache.parquet:parquet-hadoop" ] }, + "org.apache.slider:slider-core": { + "locked": "0.90.2-incubating", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.tez:hadoop-shim": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-internals" + ] + }, + "org.apache.tez:tez-api": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.tez:tez-common": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.tez:tez-dag": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.tez:tez-mapreduce": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.tez:tez-runtime-internals": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-dag" + ] + }, + "org.apache.tez:tez-runtime-library": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce" + ] + }, "org.apache.thrift:libfb303": { "locked": "0.9.3", "transitive": [ + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, "org.apache.thrift:libthrift": { "locked": "0.9.3", "transitive": [ + "co.cask.tephra:tephra-core", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", "org.apache.thrift:libfb303" ] }, + "org.apache.twill:twill-api": { + "locked": 
"0.6.0-incubating", + "transitive": [ + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-common": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-api", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "org.apache.twill:twill-discovery-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.twill:twill-discovery-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core" + ] + }, + "org.apache.twill:twill-zookeeper": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.velocity:velocity": { + "locked": "1.5", + "transitive": [ + "org.apache.hive:hive-vector-code-gen" + ] + }, "org.apache.yetus:audience-annotations": { "locked": "0.11.0", "transitive": [ @@ -4589,6 +6173,7 @@ "org.apache.zookeeper:zookeeper": { "locked": "3.4.6", "transitive": [ + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", @@ -4596,7 +6181,24 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.slider:slider-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apiguardian:apiguardian-api": { + "locked": "1.1.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.jupiter:junit-jupiter-params", + "org.junit.platform:junit-platform-commons", + "org.junit.platform:junit-platform-engine" ] }, "org.checkerframework:checker-qual": { @@ -4605,6 +6207,12 @@ "com.github.ben-manes.caffeine:caffeine" ] }, + "org.codehaus.groovy:groovy-all": { + "locked": "2.4.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.codehaus.jackson:jackson-core-asl": { "locked": "1.9.13", "transitive": [ @@ -4612,6 +6220,9 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-mapper-asl", "org.codehaus.jackson:jackson-xc" @@ -4621,7 +6232,9 @@ "locked": "1.9.13", "transitive": [ "com.sun.jersey:jersey-json", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" ] }, "org.codehaus.jackson:jackson-mapper-asl": { @@ -4631,6 +6244,12 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + 
"org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.slider:slider-core", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-xc" ] @@ -4639,7 +6258,21 @@ "locked": "1.9.13", "transitive": [ "com.sun.jersey:jersey-json", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.slider:slider-core" + ] + }, + "org.codehaus.janino:commons-compiler": { + "locked": "2.7.6", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.codehaus.janino:janino" + ] + }, + "org.codehaus.janino:janino": { + "locked": "2.7.6", + "transitive": [ + "org.apache.calcite:calcite-core" ] }, "org.codehaus.jettison:jettison": { @@ -4651,10 +6284,36 @@ "org.apache.hadoop:hadoop-yarn-server-resourcemanager" ] }, + "org.datanucleus:datanucleus-api-jdo": { + "locked": "4.2.4", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-core": { + "locked": "4.1.17", + "transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-rdbms": { + "locked": "4.1.19", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:javax.jdo": { + "locked": "3.2.0-m3", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.eclipse.jetty.aggregate:jetty-all": { "locked": "7.6.0.v20120127", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-service" ] }, "org.eclipse.jetty.orbit:javax.servlet": { @@ -4681,29 +6340,124 @@ "org.mockito:mockito-core" ] }, + "org.jamon:jamon-runtime": { + "locked": "2.3.1", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service" + ] + }, "org.jetbrains:annotations": { "locked": "17.0.0", "transitive": [ "org.apache.orc:orc-core" ] }, + "org.jruby.jcodings:jcodings": { + "locked": "1.0.8", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.jruby.joni:joni" + ] + }, + "org.jruby.joni:joni": { + "locked": "2.1.2", + "transitive": [ + "org.apache.hbase:hbase-client" + ] + }, + "org.junit.jupiter:junit-jupiter": { + "locked": "5.6.0", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.junit.jupiter:junit-jupiter-api": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter", + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.jupiter:junit-jupiter-params" + ] + }, + "org.junit.jupiter:junit-jupiter-engine": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.jupiter:junit-jupiter-params": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.platform:junit-platform-commons": { + "locked": "1.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.platform:junit-platform-engine" + ] + }, + "org.junit.platform:junit-platform-engine": { + "locked": "1.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-engine" + ] + }, "org.mockito:mockito-core": { "locked": "1.10.19" }, "org.objenesis:objenesis": { "locked": "2.1", "transitive": [ + "com.esotericsoftware.kryo:kryo", "org.mockito:mockito-core" ] }, + "org.opentest4j:opentest4j": { + "locked": "1.2.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.platform:junit-platform-engine" + ] + }, + "org.ow2.asm:asm-all": { + "locked": "5.0.2", + "transitive": [ + "org.apache.twill:twill-core" + ] + }, + "org.reflections:reflections": { + "locked": 
"0.9.8", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.roaringbitmap:RoaringBitmap": { + "locked": "0.4.9", + "transitive": [ + "org.apache.tez:tez-runtime-library" + ] + }, "org.slf4j:slf4j-api": { "locked": "1.7.25", "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", + "com.jolbox:bonecp", + "com.ning:async-http-client", + "com.yammer.metrics:metrics-core", + "com.zaxxer:HikariCP", "io.dropwizard.metrics:metrics-core", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", + "org.apache.calcite.avatica:avatica-metrics", + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid", "org.apache.curator:curator-client", "org.apache.directory.api:api-asn1-api", "org.apache.directory.api:api-util", @@ -4719,14 +6473,27 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client", "org.apache.hive.shims:hive-shims-0.23", "org.apache.hive.shims:hive-shims-common", "org.apache.hive.shims:hive-shims-scheduler", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", "org.apache.hive:hive-shims", "org.apache.hive:hive-storage-api", + "org.apache.hive:hive-vector-code-gen", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -4738,7 +6505,16 @@ "org.apache.orc:orc-shims", "org.apache.parquet:parquet-common", "org.apache.parquet:parquet-format-structures", + "org.apache.slider:slider-core", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-library", "org.apache.thrift:libthrift", + "org.apache.twill:twill-common", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", "org.apache.zookeeper:zookeeper", "org.slf4j:slf4j-simple" ] @@ -4764,15 +6540,32 @@ "org.apache.parquet:parquet-hadoop" ] }, + "oro:oro": { + "locked": "2.0.8", + "transitive": [ + "org.apache.velocity:velocity" + ] + }, + "stax:stax-api": { + "locked": "1.0.1", + "transitive": [ + "org.apache.hive:hive-exec", + "org.codehaus.jettison:jettison" + ] + }, "tomcat:jasper-compiler": { "locked": "5.5.23", "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, "tomcat:jasper-runtime": { "locked": "5.5.23", "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, @@ -4785,6 +6578,7 @@ "xml-apis:xml-apis": { "locked": "1.3.04", "transitive": [ + "dom4j:dom4j", "xerces:xercesImpl" ] }, @@ -4829,10 +6623,66 @@ "asm:asm-commons" ] }, + "ch.qos.logback:logback-classic": { + "locked": "1.0.9", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + 
"ch.qos.logback:logback-core": { + "locked": "1.0.9", + "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "co.cask.tephra:tephra-api": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-core", + "co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-core": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-hbase-compat-1.0": { + "locked": "0.6.0", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "com.beust:jcommander": { + "locked": "1.30", + "transitive": [ + "org.apache.slider:slider-core" + ] + }, + "com.esotericsoftware.kryo:kryo": { + "locked": "2.24.0", + "requested": "2.24.0" + }, + "com.esotericsoftware.minlog:minlog": { + "locked": "1.2", + "transitive": [ + "com.esotericsoftware.kryo:kryo" + ] + }, "com.fasterxml.jackson.core:jackson-annotations": { "locked": "2.10.2", + "requested": "2.6.5", "transitive": [ - "com.fasterxml.jackson.core:jackson-databind" + "com.fasterxml.jackson.core:jackson-databind", + "org.apache.calcite.avatica:avatica" ] }, "com.fasterxml.jackson.core:jackson-core": { @@ -4840,6 +6690,7 @@ "transitive": [ "com.fasterxml.jackson.core:jackson-databind", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", "org.apache.iceberg:iceberg-core" ] }, @@ -4848,6 +6699,7 @@ "transitive": [ "io.dropwizard.metrics:metrics-json", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", "org.apache.hive:hive-common", "org.apache.iceberg:iceberg-core" ] @@ -4867,6 +6719,14 @@ "com.github.stephenc.findbugs:findbugs-annotations": { "locked": "1.3.9-1", "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -4878,14 +6738,26 @@ "com.google.code.findbugs:jsr305": { "locked": "3.0.0", "transitive": [ + "org.apache.calcite:calcite-core", "org.apache.hadoop:hadoop-common", - "org.apache.hive:hive-serde" + "org.apache.hive:hive-serde", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-internals", + "org.apache.twill:twill-api", + "org.apache.twill:twill-common", + "org.apache.twill:twill-zookeeper" ] }, "com.google.code.gson:gson": { "locked": "2.2.4", "transitive": [ - "org.apache.hadoop:hadoop-common" + "co.cask.tephra:tephra-core", + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" ] }, "com.google.errorprone:error_prone_annotations": { @@ -4895,8 +6767,14 @@ ] }, "com.google.guava:guava": { - "locked": "16.0.1", - "transitive": [ + "locked": "18.0", + "transitive": [ + "co.cask.tephra:tephra-core", + "com.jolbox:bonecp", + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid", + "org.apache.calcite:calcite-linq4j", + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", @@ -4907,20 +6785,59 @@ 
"org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-vector-code-gen", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", + "org.reflections:reflections" + ] + }, + "com.google.inject.extensions:guice-assistedinject": { + "locked": "3.0", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "com.google.inject.extensions:guice-servlet": { + "locked": "3.0", + "transitive": [ + "com.sun.jersey.contribs:jersey-guice", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.slider:slider-core" ] }, "com.google.inject:guice": { "locked": "3.0", "transitive": [ + "co.cask.tephra:tephra-core", + "com.google.inject.extensions:guice-assistedinject", + "com.google.inject.extensions:guice-servlet", "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager" ] }, "com.google.protobuf:protobuf-java": { - "locked": "2.5.0", + "locked": "3.0.0-beta-1", "transitive": [ + "org.apache.calcite.avatica:avatica", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-mapreduce-client-app", @@ -4932,7 +6849,19 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.orc:orc-core" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-metastore", + "org.apache.orc:orc-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "com.jamesmurty.utils:java-xmlbuilder": { @@ -4947,6 +6876,28 @@ "org.apache.hadoop:hadoop-common" ] }, + "com.jolbox:bonecp": { + "locked": "0.8.0.RELEASE", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "com.klarna:hiverunner": { + "locked": "5.2.1", + "requested": "5.2.1" + }, + "com.lmax:disruptor": { + "locked": "3.3.0", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "com.ning:async-http-client": { + "locked": "1.8.16", + "transitive": [ + "org.apache.tez:tez-runtime-library" + ] + }, "com.sun.jersey.contribs:jersey-guice": { "locked": "1.9", "transitive": [ @@ -4958,7 +6909,9 @@ "locked": "1.9", "transitive": [ "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hadoop:hadoop-yarn-server-nodemanager" + "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "com.sun.jersey:jersey-core": { @@ -4968,8 +6921,10 @@ 
"com.sun.jersey:jersey-json", "com.sun.jersey:jersey-server", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hadoop:hadoop-yarn-server-nodemanager" + "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-server" ] }, "com.sun.jersey:jersey-json": { @@ -4977,7 +6932,9 @@ "transitive": [ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hadoop:hadoop-yarn-server-nodemanager" + "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "com.sun.jersey:jersey-server": { @@ -4985,7 +6942,10 @@ "transitive": [ "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" ] }, "com.sun.xml.bind:jaxb-impl": { @@ -4997,7 +6957,21 @@ "com.tdunning:json": { "locked": "1.8", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server" + ] + }, + "com.yammer.metrics:metrics-core": { + "locked": "2.2.0", + "transitive": [ + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server" + ] + }, + "com.zaxxer:HikariCP": { + "locked": "2.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" ] }, "commons-beanutils:commons-beanutils": { @@ -5019,8 +6993,13 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", - "org.apache.hive:hive-service-rpc" + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-service", + "org.apache.hive:hive-service-rpc", + "org.apache.tez:tez-dag" ] }, "commons-codec:commons-codec": { @@ -5033,16 +7012,27 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-llap-server", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", - "org.apache.httpcomponents:httpclient" + "org.apache.httpcomponents:httpclient", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-library" ] }, "commons-collections:commons-collections": { "locked": "3.2.2", "transitive": [ "commons-configuration:commons-configuration", - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.tez:tez-mapreduce" ] }, "commons-configuration:commons-configuration": { @@ -5051,6 +7041,19 @@ "org.apache.hadoop:hadoop-common" ] }, + "commons-daemon:commons-daemon": { + "locked": "1.0.13", + "transitive": [ + "org.apache.hadoop:hadoop-hdfs" + ] + }, + "commons-dbcp:commons-dbcp": { + "locked": "1.4", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.apache.hive:hive-metastore" + ] + }, "commons-digester:commons-digester": { "locked": "1.8", "transitive": [ @@ -5066,7 +7069,10 @@ "commons-httpclient:commons-httpclient": { "locked": "3.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + 
"org.apache.slider:slider-core" ] }, "commons-io:commons-io": { @@ -5074,7 +7080,13 @@ "transitive": [ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "commons-lang:commons-lang": { @@ -5087,11 +7099,26 @@ "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-storage-api", - "org.apache.orc:orc-core" + "org.apache.hive:hive-vector-code-gen", + "org.apache.orc:orc-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-library", + "org.apache.velocity:velocity" ] }, "commons-logging:commons-logging": { @@ -5104,6 +7131,7 @@ "commons-el:commons-el", "commons-httpclient:commons-httpclient", "net.java.dev.jets3t:jets3t", + "net.sf.jpam:jpam", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-api", @@ -5111,7 +7139,16 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.httpcomponents:httpclient" + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.httpcomponents:httpclient", + "org.apache.slider:slider-core" ] }, "commons-net:commons-net": { @@ -5123,9 +7160,17 @@ "commons-pool:commons-pool": { "locked": "1.6", "transitive": [ + "commons-dbcp:commons-dbcp", + "org.apache.hive:hive-metastore", "org.apache.parquet:parquet-hadoop" ] }, + "dom4j:dom4j": { + "locked": "1.6.1", + "transitive": [ + "org.reflections:reflections" + ] + }, "io.airlift:aircompressor": { "locked": "0.15", "transitive": [ @@ -5135,6 +7180,7 @@ "io.dropwizard.metrics:metrics-core": { "locked": "3.1.2", "transitive": [ + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", @@ -5154,16 +7200,40 @@ ] }, "io.netty:netty": { - "locked": "3.7.0.Final", + "locked": "3.9.2.Final", "transitive": [ + "com.ning:async-http-client", "org.apache.hadoop:hadoop-hdfs", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hive:hive-llap-server", "org.apache.zookeeper:zookeeper" ] }, "io.netty:netty-all": { "locked": "4.0.23.Final", "transitive": [ - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server" + ] + }, + "it.unimi.dsi:fastutil": { + "locked": "6.5.6", + 
"transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "jakarta.jms:jakarta.jms-api": { + "locked": "2.0.2", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions" + ] + }, + "javassist:javassist": { + "locked": "3.12.1.GA", + "transitive": [ + "org.reflections:reflections" ] }, "javax.activation:activation": { @@ -5186,6 +7256,12 @@ "com.sun.jersey.contribs:jersey-guice" ] }, + "javax.jdo:jdo-api": { + "locked": "3.0.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "javax.mail:mail": { "locked": "1.4.1", "transitive": [ @@ -5203,11 +7279,28 @@ "transitive": [ "javax.servlet:jsp-api", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.slider:slider-core", + "org.apache.tez:tez-dag", + "org.eclipse.jetty.aggregate:jetty-all", "tomcat:jasper-runtime" ] }, + "javax.transaction:jta": { + "locked": "1.1", + "transitive": [ + "javax.jdo:jdo-api" + ] + }, + "javax.transaction:transaction-api": { + "locked": "1.1", + "transitive": [ + "org.datanucleus:javax.jdo" + ] + }, "javax.xml.bind:jaxb-api": { "locked": "2.2.11", "transitive": [ @@ -5217,9 +7310,16 @@ "org.apache.orc:orc-core" ] }, + "javolution:javolution": { + "locked": "5.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "jline:jline": { "locked": "2.12", "transitive": [ + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", "org.apache.zookeeper:zookeeper" ] @@ -5227,11 +7327,22 @@ "joda-time:joda-time": { "locked": "2.8.1", "transitive": [ + "org.apache.calcite:calcite-druid", "org.apache.hive:hive-common" ] }, "junit:junit": { - "locked": "4.12" + "locked": "4.12", + "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server" + ] }, "log4j:log4j": { "locked": "1.2.17", @@ -5240,25 +7351,60 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", "org.apache.zookeeper:zookeeper" ] }, + "net.hydromatic:eigenbase-properties": { + "locked": "1.1.5", + "transitive": [ + "org.apache.calcite:calcite-core" + ] + }, "net.java.dev.jets3t:jets3t": { "locked": "0.9.0", "transitive": [ "org.apache.hadoop:hadoop-common" ] }, + "net.sf.jpam:jpam": { + "locked": "1.1", + "transitive": [ + "org.apache.hive:hive-service" + ] + }, "net.sf.opencsv:opencsv": { "locked": "2.3", "transitive": [ "org.apache.hive:hive-serde" ] }, + "org.antlr:ST4": { + "locked": "4.0.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.antlr:antlr-runtime": { + "locked": "3.5.2", + "transitive": [ + "org.antlr:ST4", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, "org.apache.ant:ant": { "locked": "1.9.1", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-vector-code-gen" ] }, "org.apache.ant:ant-launcher": { @@ -5271,8 +7417,51 @@ 
"locked": "1.9.2", "transitive": [ "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hive:hive-llap-server", "org.apache.hive:hive-serde", - "org.apache.iceberg:iceberg-core" + "org.apache.iceberg:iceberg-core", + "org.apache.slider:slider-core" + ] + }, + "org.apache.calcite.avatica:avatica": { + "locked": "1.8.0", + "transitive": [ + "org.apache.calcite:calcite-core" + ] + }, + "org.apache.calcite.avatica:avatica-metrics": { + "locked": "1.8.0", + "transitive": [ + "org.apache.calcite.avatica:avatica" + ] + }, + "org.apache.calcite:calcite-core": { + "locked": "1.10.0", + "transitive": [ + "org.apache.calcite:calcite-druid", + "org.apache.hive:hive-exec" + ] + }, + "org.apache.calcite:calcite-druid": { + "locked": "1.10.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.apache.calcite:calcite-linq4j": { + "locked": "1.10.0", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid" + ] + }, + "org.apache.commons:commons-collections4": { + "locked": "4.1", + "transitive": [ + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag" ] }, "org.apache.commons:commons-compress": { @@ -5281,26 +7470,49 @@ "org.apache.avro:avro", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core" ] }, "org.apache.commons:commons-lang3": { - "locked": "3.1", + "locked": "3.2", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.calcite:calcite-core", + "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-tez" + ] + }, + "org.apache.commons:commons-math": { + "locked": "2.2", + "transitive": [ + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server" ] }, "org.apache.commons:commons-math3": { "locked": "3.1.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.tez:tez-dag" + ] + }, + "org.apache.curator:apache-curator": { + "locked": "2.7.1", + "transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-llap-client" ] }, "org.apache.curator:curator-client": { "locked": "2.7.1", "transitive": [ "org.apache.curator:curator-framework", - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.slider:slider-core" ] }, "org.apache.curator:curator-framework": { @@ -5308,13 +7520,27 @@ "transitive": [ "org.apache.curator:curator-recipes", "org.apache.hadoop:hadoop-auth", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-service", + "org.apache.slider:slider-core" ] }, "org.apache.curator:curator-recipes": { "locked": "2.7.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-service", + "org.apache.slider:slider-core" + ] + }, + "org.apache.derby:derby": { + "locked": "10.10.2.0", + "transitive": [ + "org.apache.hive:hive-metastore" ] }, "org.apache.directory.api:api-asn1-api": { @@ -5363,29 +7589,74 @@ "locked": "2.7.3", "transitive": [ "org.apache.hadoop:hadoop-client", - "org.apache.hadoop:hadoop-common" + 
"org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hadoop:hadoop-yarn-api", + "org.apache.hadoop:hadoop-yarn-client", + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-server-common", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.hadoop:hadoop-archives": { + "locked": "2.7.2", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core" ] }, "org.apache.hadoop:hadoop-auth": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.tez:tez-api", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-client": { - "locked": "2.7.3" + "locked": "2.7.3", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" + ] }, "org.apache.hadoop:hadoop-common": { "locked": "2.7.3", "transitive": [ "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-hdfs": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.slider:slider-core" ] }, "org.apache.hadoop:hadoop-mapreduce-client-app": { @@ -5399,14 +7670,22 @@ "transitive": [ "org.apache.hadoop:hadoop-mapreduce-client-app", "org.apache.hadoop:hadoop-mapreduce-client-jobclient", - "org.apache.hadoop:hadoop-mapreduce-client-shuffle" + "org.apache.hadoop:hadoop-mapreduce-client-shuffle", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-mapreduce-client-core": { "locked": "2.7.3", "transitive": [ "org.apache.hadoop:hadoop-client", - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-webhcat-java-client", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-mapreduce-client-jobclient": { @@ -5428,14 +7707,26 @@ "org.apache.hadoop:hadoop-client", "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", "org.apache.hadoop:hadoop-yarn-server-common", - "org.apache.hadoop:hadoop-yarn-server-nodemanager" + "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + 
"org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-yarn-client": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-yarn-common": { @@ -5444,8 +7735,22 @@ "org.apache.hadoop:hadoop-mapreduce-client-common", "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-client", + "org.apache.hadoop:hadoop-yarn-registry", "org.apache.hadoop:hadoop-yarn-server-common", - "org.apache.hadoop:hadoop-yarn-server-nodemanager" + "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.hadoop:hadoop-yarn-registry": { + "locked": "2.7.1", + "transitive": [ + "org.apache.slider:slider-core" ] }, "org.apache.hadoop:hadoop-yarn-server-common": { @@ -5453,7 +7758,8 @@ "transitive": [ "org.apache.hadoop:hadoop-mapreduce-client-common", "org.apache.hadoop:hadoop-mapreduce-client-shuffle", - "org.apache.hadoop:hadoop-yarn-server-nodemanager" + "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hadoop:hadoop-yarn-server-web-proxy" ] }, "org.apache.hadoop:hadoop-yarn-server-nodemanager": { @@ -5462,31 +7768,198 @@ "org.apache.hadoop:hadoop-mapreduce-client-shuffle" ] }, + "org.apache.hadoop:hadoop-yarn-server-web-proxy": { + "locked": "2.7.0", + "transitive": [ + "org.apache.tez:tez-dag" + ] + }, + "org.apache.hbase:hbase-annotations": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-protocol" + ] + }, + "org.apache.hbase:hbase-client": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore" + ] + }, + "org.apache.hbase:hbase-common": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-hadoop-compat": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-hadoop2-compat": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-procedure": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-protocol": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-server": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive.hcatalog:hive-hcatalog-core": { + "locked": "2.3.7", + "transitive": [ + 
"org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client" + ] + }, + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-webhcat-java-client" + ] + }, + "org.apache.hive.hcatalog:hive-webhcat-java-client": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner" + ] + }, "org.apache.hive.shims:hive-shims-common": { "locked": "2.3.7", "transitive": [ "org.apache.hive:hive-shims" ] }, + "org.apache.hive:hive-cli": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core" + ] + }, "org.apache.hive:hive-common": { "locked": "2.3.7", "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez", "org.apache.hive:hive-serde" ] }, - "org.apache.hive:hive-serde": { + "org.apache.hive:hive-exec": { "locked": "2.3.7" }, + "org.apache.hive:hive-jdbc": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.hive:hive-llap-client": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez" + ] + }, + "org.apache.hive:hive-llap-common": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive:hive-llap-server": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-service" + ] + }, + "org.apache.hive:hive-llap-tez": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive:hive-metastore": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-service" + ] + }, + "org.apache.hive:hive-serde": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore" + ] + }, + "org.apache.hive:hive-service": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc" + ] + }, "org.apache.hive:hive-service-rpc": { "locked": "2.3.7", "transitive": [ - "org.apache.hive:hive-serde" + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-serde", + "org.apache.hive:hive-service" ] }, "org.apache.hive:hive-shims": { "locked": "2.3.7", "transitive": [ + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde" ] }, @@ -5496,26 +7969,41 @@ "org.apache.hive:hive-common" ] }, + "org.apache.hive:hive-vector-code-gen": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.htrace:htrace-core": { "locked": "3.1.0-incubating", "transitive": [ "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server" ] }, "org.apache.httpcomponents:httpclient": { - "locked": "4.4.1", + "locked": "4.5.2", "transitive": [ 
"net.java.dev.jets3t:jets3t", + "org.apache.calcite.avatica:avatica", "org.apache.hadoop:hadoop-auth", + "org.apache.hive:hive-jdbc", + "org.apache.slider:slider-core", "org.apache.thrift:libthrift" ] }, "org.apache.httpcomponents:httpcore": { - "locked": "4.4.1", + "locked": "4.4.4", "transitive": [ "net.java.dev.jets3t:jets3t", + "org.apache.calcite.avatica:avatica", + "org.apache.hive:hive-jdbc", "org.apache.httpcomponents:httpclient", + "org.apache.slider:slider-core", "org.apache.thrift:libthrift" ] }, @@ -5558,6 +8046,12 @@ "org.apache.iceberg:iceberg-parquet": { "project": true }, + "org.apache.ivy:ivy": { + "locked": "2.4.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.logging.log4j:log4j-1.2-api": { "locked": "2.6.2", "transitive": [ @@ -5597,6 +8091,7 @@ "locked": "1.6.3", "transitive": [ "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server", "org.apache.iceberg:iceberg-orc" ] }, @@ -5658,21 +8153,139 @@ "org.apache.parquet:parquet-hadoop" ] }, + "org.apache.slider:slider-core": { + "locked": "0.90.2-incubating", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.tez:hadoop-shim": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-internals" + ] + }, + "org.apache.tez:tez-api": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.tez:tez-common": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.tez:tez-dag": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.tez:tez-mapreduce": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.tez:tez-runtime-internals": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-dag" + ] + }, + "org.apache.tez:tez-runtime-library": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce" + ] + }, "org.apache.thrift:libfb303": { "locked": "0.9.3", "transitive": [ + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, "org.apache.thrift:libthrift": { "locked": "0.9.3", "transitive": [ + "co.cask.tephra:tephra-core", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", "org.apache.thrift:libfb303" ] }, + "org.apache.twill:twill-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-common": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-api", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "org.apache.twill:twill-discovery-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + 
"org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.twill:twill-discovery-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core" + ] + }, + "org.apache.twill:twill-zookeeper": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.velocity:velocity": { + "locked": "1.5", + "transitive": [ + "org.apache.hive:hive-vector-code-gen" + ] + }, "org.apache.yetus:audience-annotations": { "locked": "0.11.0", "transitive": [ @@ -5682,13 +8295,29 @@ "org.apache.zookeeper:zookeeper": { "locked": "3.4.6", "transitive": [ + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", "org.apache.hadoop:hadoop-auth", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-server-common", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.slider:slider-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apiguardian:apiguardian-api": { + "locked": "1.1.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.jupiter:junit-jupiter-params", + "org.junit.platform:junit-platform-commons" ] }, "org.checkerframework:checker-qual": { @@ -5697,6 +8326,12 @@ "com.github.ben-manes.caffeine:caffeine" ] }, + "org.codehaus.groovy:groovy-all": { + "locked": "2.4.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.codehaus.jackson:jackson-core-asl": { "locked": "1.9.13", "transitive": [ @@ -5704,6 +8339,9 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-mapper-asl", "org.codehaus.jackson:jackson-xc" @@ -5713,7 +8351,9 @@ "locked": "1.9.13", "transitive": [ "com.sun.jersey:jersey-json", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" ] }, "org.codehaus.jackson:jackson-mapper-asl": { @@ -5723,6 +8363,12 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.slider:slider-core", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-xc" ] @@ -5731,7 +8377,21 @@ "locked": "1.9.13", "transitive": [ "com.sun.jersey:jersey-json", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.slider:slider-core" + ] + }, + "org.codehaus.janino:commons-compiler": { + "locked": "2.7.6", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.codehaus.janino:janino" + ] + }, + "org.codehaus.janino:janino": { + "locked": "2.7.6", + "transitive": [ + "org.apache.calcite:calcite-core" ] }, "org.codehaus.jettison:jettison": { @@ -5741,10 +8401,36 @@ 
"org.apache.hadoop:hadoop-yarn-server-nodemanager" ] }, + "org.datanucleus:datanucleus-api-jdo": { + "locked": "4.2.4", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-core": { + "locked": "4.1.17", + "transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-rdbms": { + "locked": "4.1.19", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:javax.jdo": { + "locked": "3.2.0-m3", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.eclipse.jetty.aggregate:jetty-all": { "locked": "7.6.0.v20120127", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-service" ] }, "org.eclipse.jetty.orbit:javax.servlet": { @@ -5768,23 +8454,108 @@ "junit:junit" ] }, + "org.jamon:jamon-runtime": { + "locked": "2.3.1", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service" + ] + }, "org.jetbrains:annotations": { "locked": "17.0.0", "transitive": [ "org.apache.orc:orc-core" ] }, + "org.jruby.jcodings:jcodings": { + "locked": "1.0.8", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.jruby.joni:joni" + ] + }, + "org.jruby.joni:joni": { + "locked": "2.1.2", + "transitive": [ + "org.apache.hbase:hbase-client" + ] + }, + "org.junit.jupiter:junit-jupiter": { + "locked": "5.6.0", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.junit.jupiter:junit-jupiter-api": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter", + "org.junit.jupiter:junit-jupiter-params" + ] + }, + "org.junit.jupiter:junit-jupiter-params": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.platform:junit-platform-commons": { + "locked": "1.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api" + ] + }, "org.mockito:mockito-core": { "locked": "1.10.19" }, + "org.objenesis:objenesis": { + "locked": "2.1", + "transitive": [ + "com.esotericsoftware.kryo:kryo" + ] + }, + "org.opentest4j:opentest4j": { + "locked": "1.2.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api" + ] + }, + "org.ow2.asm:asm-all": { + "locked": "5.0.2", + "transitive": [ + "org.apache.twill:twill-core" + ] + }, + "org.reflections:reflections": { + "locked": "0.9.8", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.roaringbitmap:RoaringBitmap": { + "locked": "0.4.9", + "transitive": [ + "org.apache.tez:tez-runtime-library" + ] + }, "org.slf4j:slf4j-api": { "locked": "1.7.25", "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", + "com.jolbox:bonecp", + "com.ning:async-http-client", + "com.yammer.metrics:metrics-core", + "com.zaxxer:HikariCP", "io.dropwizard.metrics:metrics-core", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", + "org.apache.calcite.avatica:avatica-metrics", + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid", "org.apache.curator:curator-client", "org.apache.directory.api:api-asn1-api", "org.apache.directory.api:api-util", @@ -5799,12 +8570,25 @@ "org.apache.hadoop:hadoop-mapreduce-client-shuffle", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + 
"org.apache.hive.hcatalog:hive-webhcat-java-client", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", "org.apache.hive:hive-shims", "org.apache.hive:hive-storage-api", + "org.apache.hive:hive-vector-code-gen", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -5816,7 +8600,16 @@ "org.apache.orc:orc-shims", "org.apache.parquet:parquet-common", "org.apache.parquet:parquet-format-structures", + "org.apache.slider:slider-core", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-library", "org.apache.thrift:libthrift", + "org.apache.twill:twill-common", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", "org.apache.zookeeper:zookeeper", "org.slf4j:slf4j-simple" ] @@ -5842,15 +8635,31 @@ "org.apache.parquet:parquet-hadoop" ] }, + "oro:oro": { + "locked": "2.0.8", + "transitive": [ + "org.apache.velocity:velocity" + ] + }, + "stax:stax-api": { + "locked": "1.0.1", + "transitive": [ + "org.apache.hive:hive-exec", + "org.codehaus.jettison:jettison" + ] + }, "tomcat:jasper-compiler": { "locked": "5.5.23", "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, "tomcat:jasper-runtime": { "locked": "5.5.23", "transitive": [ + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, @@ -5863,6 +8672,7 @@ "xml-apis:xml-apis": { "locked": "1.3.04", "transitive": [ + "dom4j:dom4j", "xerces:xercesImpl" ] }, @@ -5907,10 +8717,66 @@ "asm:asm-commons" ] }, + "ch.qos.logback:logback-classic": { + "locked": "1.0.9", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "ch.qos.logback:logback-core": { + "locked": "1.0.9", + "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "co.cask.tephra:tephra-api": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-core", + "co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-core": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-hbase-compat-1.0": { + "locked": "0.6.0", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "com.beust:jcommander": { + "locked": "1.30", + "transitive": [ + "org.apache.slider:slider-core" + ] + }, + "com.esotericsoftware.kryo:kryo": { + "locked": "2.24.0", + "requested": "2.24.0" + }, + "com.esotericsoftware.minlog:minlog": { + "locked": "1.2", + "transitive": [ + "com.esotericsoftware.kryo:kryo" + ] + }, "com.fasterxml.jackson.core:jackson-annotations": { "locked": "2.10.2", + "requested": "2.6.5", "transitive": [ - "com.fasterxml.jackson.core:jackson-databind" + "com.fasterxml.jackson.core:jackson-databind", + "org.apache.calcite.avatica:avatica" ] }, "com.fasterxml.jackson.core:jackson-core": { @@ -5918,6 +8784,7 
@@ "transitive": [ "com.fasterxml.jackson.core:jackson-databind", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", "org.apache.iceberg:iceberg-core" ] }, @@ -5926,6 +8793,7 @@ "transitive": [ "io.dropwizard.metrics:metrics-json", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", "org.apache.hive:hive-common", "org.apache.iceberg:iceberg-core" ] @@ -5945,6 +8813,15 @@ "com.github.stephenc.findbugs:findbugs-annotations": { "locked": "1.3.9-1", "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -5956,14 +8833,26 @@ "com.google.code.findbugs:jsr305": { "locked": "3.0.0", "transitive": [ + "org.apache.calcite:calcite-core", "org.apache.hadoop:hadoop-common", - "org.apache.hive:hive-serde" + "org.apache.hive:hive-serde", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-internals", + "org.apache.twill:twill-api", + "org.apache.twill:twill-common", + "org.apache.twill:twill-zookeeper" ] }, "com.google.code.gson:gson": { "locked": "2.2.4", "transitive": [ - "org.apache.hadoop:hadoop-common" + "co.cask.tephra:tephra-core", + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" ] }, "com.google.errorprone:error_prone_annotations": { @@ -5973,8 +8862,14 @@ ] }, "com.google.guava:guava": { - "locked": "16.0.1", - "transitive": [ + "locked": "18.0", + "transitive": [ + "co.cask.tephra:tephra-core", + "com.jolbox:bonecp", + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid", + "org.apache.calcite:calcite-linq4j", + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", @@ -5988,21 +8883,51 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", "org.apache.hadoop:hadoop-yarn-server-web-proxy", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-vector-code-gen", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", + "org.reflections:reflections" + ] + }, + "com.google.inject.extensions:guice-assistedinject": { + "locked": "3.0", + "transitive": [ + "co.cask.tephra:tephra-core" ] }, "com.google.inject.extensions:guice-servlet": { "locked": "3.0", "transitive": [ "com.sun.jersey.contribs:jersey-guice", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", 
"org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.slider:slider-core" ] }, "com.google.inject:guice": { "locked": "3.0", "transitive": [ + "co.cask.tephra:tephra-core", + "com.google.inject.extensions:guice-assistedinject", "com.google.inject.extensions:guice-servlet", "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-yarn-common", @@ -6012,8 +8937,9 @@ ] }, "com.google.protobuf:protobuf-java": { - "locked": "2.5.0", + "locked": "3.0.0-beta-1", "transitive": [ + "org.apache.calcite.avatica:avatica", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-mapreduce-client-app", @@ -6027,7 +8953,19 @@ "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.orc:orc-core" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-metastore", + "org.apache.orc:orc-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "com.jamesmurty.utils:java-xmlbuilder": { @@ -6042,6 +8980,28 @@ "org.apache.hadoop:hadoop-common" ] }, + "com.jolbox:bonecp": { + "locked": "0.8.0.RELEASE", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "com.klarna:hiverunner": { + "locked": "5.2.1", + "requested": "5.2.1" + }, + "com.lmax:disruptor": { + "locked": "3.3.0", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "com.ning:async-http-client": { + "locked": "1.8.16", + "transitive": [ + "org.apache.tez:tez-runtime-library" + ] + }, "com.sun.jersey.contribs:jersey-guice": { "locked": "1.9", "transitive": [ @@ -6057,7 +9017,9 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "com.sun.jersey:jersey-core": { @@ -6067,10 +9029,12 @@ "com.sun.jersey:jersey-json", "com.sun.jersey:jersey-server", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-server" ] }, "com.sun.jersey:jersey-json": { @@ -6080,7 +9044,9 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "com.sun.jersey:jersey-server": { @@ -6088,7 +9054,10 @@ "transitive": [ "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-yarn-common" + 
"org.apache.hadoop:hadoop-hdfs", + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" ] }, "com.sun.xml.bind:jaxb-impl": { @@ -6100,7 +9069,21 @@ "com.tdunning:json": { "locked": "1.8", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server" + ] + }, + "com.yammer.metrics:metrics-core": { + "locked": "2.2.0", + "transitive": [ + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server" + ] + }, + "com.zaxxer:HikariCP": { + "locked": "2.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" ] }, "commons-beanutils:commons-beanutils": { @@ -6122,8 +9105,13 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", - "org.apache.hive:hive-service-rpc" + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-service", + "org.apache.hive:hive-service-rpc", + "org.apache.tez:tez-dag" ] }, "commons-codec:commons-codec": { @@ -6136,9 +9124,17 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-llap-server", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", - "org.apache.httpcomponents:httpclient" + "org.apache.httpcomponents:httpclient", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-library" ] }, "commons-collections:commons-collections": { @@ -6146,7 +9142,10 @@ "transitive": [ "commons-configuration:commons-configuration", "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice" + "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.tez:tez-mapreduce" ] }, "commons-configuration:commons-configuration": { @@ -6155,6 +9154,19 @@ "org.apache.hadoop:hadoop-common" ] }, + "commons-daemon:commons-daemon": { + "locked": "1.0.13", + "transitive": [ + "org.apache.hadoop:hadoop-hdfs" + ] + }, + "commons-dbcp:commons-dbcp": { + "locked": "1.4", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.apache.hive:hive-metastore" + ] + }, "commons-digester:commons-digester": { "locked": "1.8", "transitive": [ @@ -6170,7 +9182,10 @@ "commons-httpclient:commons-httpclient": { "locked": "3.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core" ] }, "commons-io:commons-io": { @@ -6179,7 +9194,13 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "commons-lang:commons-lang": { @@ -6193,12 +9214,27 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-client", 
+ "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server", "org.apache.hive.shims:hive-shims-0.23", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-storage-api", - "org.apache.orc:orc-core" + "org.apache.hive:hive-vector-code-gen", + "org.apache.orc:orc-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-library", + "org.apache.velocity:velocity" ] }, "commons-logging:commons-logging": { @@ -6211,6 +9247,7 @@ "commons-el:commons-el", "commons-httpclient:commons-httpclient", "net.java.dev.jets3t:jets3t", + "net.sf.jpam:jpam", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-api", @@ -6221,7 +9258,16 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", "org.apache.hadoop:hadoop-yarn-server-web-proxy", - "org.apache.httpcomponents:httpclient" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.httpcomponents:httpclient", + "org.apache.slider:slider-core" ] }, "commons-net:commons-net": { @@ -6233,9 +9279,17 @@ "commons-pool:commons-pool": { "locked": "1.6", "transitive": [ + "commons-dbcp:commons-dbcp", + "org.apache.hive:hive-metastore", "org.apache.parquet:parquet-hadoop" ] }, + "dom4j:dom4j": { + "locked": "1.6.1", + "transitive": [ + "org.reflections:reflections" + ] + }, "io.airlift:aircompressor": { "locked": "0.15", "transitive": [ @@ -6245,6 +9299,7 @@ "io.dropwizard.metrics:metrics-core": { "locked": "3.1.2", "transitive": [ + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", @@ -6264,16 +9319,41 @@ ] }, "io.netty:netty": { - "locked": "3.7.0.Final", + "locked": "3.9.2.Final", "transitive": [ + "com.ning:async-http-client", "org.apache.hadoop:hadoop-hdfs", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hive:hive-llap-server", "org.apache.zookeeper:zookeeper" ] }, "io.netty:netty-all": { "locked": "4.0.23.Final", "transitive": [ - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-server" + ] + }, + "it.unimi.dsi:fastutil": { + "locked": "6.5.6", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "jakarta.jms:jakarta.jms-api": { + "locked": "2.0.2", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions" + ] + }, + "javassist:javassist": { + "locked": "3.12.1.GA", + "transitive": [ + "org.reflections:reflections" ] }, "javax.activation:activation": { @@ -6296,6 +9376,12 @@ "com.sun.jersey.contribs:jersey-guice" ] }, + "javax.jdo:jdo-api": { + "locked": "3.0.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "javax.mail:mail": { "locked": "1.4.1", "transitive": [ @@ -6305,7 +9391,8 @@ 
"javax.servlet.jsp:jsp-api": { "locked": "2.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.slider:slider-core" ] }, "javax.servlet:jsp-api": { @@ -6319,11 +9406,28 @@ "transitive": [ "javax.servlet:jsp-api", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.slider:slider-core", + "org.apache.tez:tez-dag", + "org.eclipse.jetty.aggregate:jetty-all", "tomcat:jasper-runtime" ] }, + "javax.transaction:jta": { + "locked": "1.1", + "transitive": [ + "javax.jdo:jdo-api" + ] + }, + "javax.transaction:transaction-api": { + "locked": "1.1", + "transitive": [ + "org.datanucleus:javax.jdo" + ] + }, "javax.xml.bind:jaxb-api": { "locked": "2.2.11", "transitive": [ @@ -6335,9 +9439,16 @@ "org.apache.orc:orc-core" ] }, + "javolution:javolution": { + "locked": "5.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "jline:jline": { "locked": "2.12", "transitive": [ + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", "org.apache.zookeeper:zookeeper" ] @@ -6345,11 +9456,23 @@ "joda-time:joda-time": { "locked": "2.8.1", "transitive": [ + "org.apache.calcite:calcite-druid", "org.apache.hive:hive-common" ] }, "junit:junit": { - "locked": "4.12" + "locked": "4.12", + "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server" + ] }, "log4j:log4j": { "locked": "1.2.17", @@ -6360,25 +9483,62 @@ "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core", "org.apache.zookeeper:zookeeper" ] }, + "net.hydromatic:eigenbase-properties": { + "locked": "1.1.5", + "transitive": [ + "org.apache.calcite:calcite-core" + ] + }, "net.java.dev.jets3t:jets3t": { "locked": "0.9.0", "transitive": [ "org.apache.hadoop:hadoop-common" ] }, + "net.sf.jpam:jpam": { + "locked": "1.1", + "transitive": [ + "org.apache.hive:hive-service" + ] + }, "net.sf.opencsv:opencsv": { "locked": "2.3", "transitive": [ "org.apache.hive:hive-serde" ] }, + "org.antlr:ST4": { + "locked": "4.0.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.antlr:antlr-runtime": { + "locked": "3.5.2", + "transitive": [ + "org.antlr:ST4", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, "org.apache.ant:ant": { "locked": "1.9.1", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-vector-code-gen" ] }, "org.apache.ant:ant-launcher": { @@ -6391,8 +9551,51 @@ "locked": "1.9.2", "transitive": [ "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hive:hive-llap-server", 
"org.apache.hive:hive-serde", - "org.apache.iceberg:iceberg-core" + "org.apache.iceberg:iceberg-core", + "org.apache.slider:slider-core" + ] + }, + "org.apache.calcite.avatica:avatica": { + "locked": "1.8.0", + "transitive": [ + "org.apache.calcite:calcite-core" + ] + }, + "org.apache.calcite.avatica:avatica-metrics": { + "locked": "1.8.0", + "transitive": [ + "org.apache.calcite.avatica:avatica" + ] + }, + "org.apache.calcite:calcite-core": { + "locked": "1.10.0", + "transitive": [ + "org.apache.calcite:calcite-druid", + "org.apache.hive:hive-exec" + ] + }, + "org.apache.calcite:calcite-druid": { + "locked": "1.10.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.apache.calcite:calcite-linq4j": { + "locked": "1.10.0", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid" + ] + }, + "org.apache.commons:commons-collections4": { + "locked": "4.1", + "transitive": [ + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag" ] }, "org.apache.commons:commons-compress": { @@ -6401,26 +9604,49 @@ "org.apache.avro:avro", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core" ] }, "org.apache.commons:commons-lang3": { - "locked": "3.1", + "locked": "3.2", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.calcite:calcite-core", + "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-tez" + ] + }, + "org.apache.commons:commons-math": { + "locked": "2.2", + "transitive": [ + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server" ] }, "org.apache.commons:commons-math3": { "locked": "3.1.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.tez:tez-dag" + ] + }, + "org.apache.curator:apache-curator": { + "locked": "2.7.1", + "transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-llap-client" ] }, "org.apache.curator:curator-client": { "locked": "2.7.1", "transitive": [ "org.apache.curator:curator-framework", - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.slider:slider-core" ] }, "org.apache.curator:curator-framework": { @@ -6428,13 +9654,27 @@ "transitive": [ "org.apache.curator:curator-recipes", "org.apache.hadoop:hadoop-auth", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-service", + "org.apache.slider:slider-core" ] }, "org.apache.curator:curator-recipes": { "locked": "2.7.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-service", + "org.apache.slider:slider-core" + ] + }, + "org.apache.derby:derby": { + "locked": "10.10.2.0", + "transitive": [ + "org.apache.hive:hive-metastore" ] }, "org.apache.directory.api:api-asn1-api": { @@ -6484,33 +9724,77 @@ "transitive": [ "org.apache.hadoop:hadoop-client", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-api", + "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", 
"org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-common", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.hadoop:hadoop-archives": { + "locked": "2.7.2", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core" ] }, "org.apache.hadoop:hadoop-auth": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.tez:tez-api", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-client": { - "locked": "2.7.3" + "locked": "2.7.3", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" + ] }, "org.apache.hadoop:hadoop-common": { "locked": "2.7.3", "transitive": [ "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-hdfs": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "org.apache.hadoop:hadoop-mapreduce-client-app": { @@ -6524,14 +9808,22 @@ "transitive": [ "org.apache.hadoop:hadoop-mapreduce-client-app", "org.apache.hadoop:hadoop-mapreduce-client-jobclient", - "org.apache.hadoop:hadoop-mapreduce-client-shuffle" + "org.apache.hadoop:hadoop-mapreduce-client-shuffle", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-mapreduce-client-core": { "locked": "2.7.3", "transitive": [ "org.apache.hadoop:hadoop-client", - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-webhcat-java-client", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-mapreduce-client-jobclient": { @@ -6553,17 +9845,28 @@ "org.apache.hadoop:hadoop-client", "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - 
"org.apache.hadoop:hadoop-yarn-server-web-proxy" + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-yarn-client": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-yarn-common": { @@ -6572,11 +9875,24 @@ "org.apache.hadoop:hadoop-mapreduce-client-common", "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-client", + "org.apache.hadoop:hadoop-yarn-registry", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.hadoop:hadoop-yarn-server-web-proxy" + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.hadoop:hadoop-yarn-registry": { + "locked": "2.7.1", + "transitive": [ + "org.apache.slider:slider-core" ] }, "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice": { @@ -6611,7 +9927,98 @@ "org.apache.hadoop:hadoop-yarn-server-web-proxy": { "locked": "2.7.2", "transitive": [ - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.tez:tez-dag" + ] + }, + "org.apache.hbase:hbase-annotations": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-protocol" + ] + }, + "org.apache.hbase:hbase-client": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore" + ] + }, + "org.apache.hbase:hbase-common": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-hadoop-compat": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-hadoop2-compat": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-prefix-tree": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-procedure": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-protocol": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-server": { + "locked": "1.1.1", + 
"transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive.hcatalog:hive-hcatalog-core": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client" + ] + }, + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-webhcat-java-client" + ] + }, + "org.apache.hive.hcatalog:hive-webhcat-java-client": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner" ] }, "org.apache.hive.shims:hive-shims-0.23": { @@ -6634,25 +10041,105 @@ "org.apache.hive:hive-shims" ] }, + "org.apache.hive:hive-cli": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core" + ] + }, "org.apache.hive:hive-common": { "locked": "2.3.7", "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez", "org.apache.hive:hive-serde" ] }, - "org.apache.hive:hive-serde": { + "org.apache.hive:hive-exec": { "locked": "2.3.7" }, + "org.apache.hive:hive-jdbc": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.hive:hive-llap-client": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez" + ] + }, + "org.apache.hive:hive-llap-common": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive:hive-llap-server": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-service" + ] + }, + "org.apache.hive:hive-llap-tez": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive:hive-metastore": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-service" + ] + }, + "org.apache.hive:hive-serde": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore" + ] + }, + "org.apache.hive:hive-service": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc" + ] + }, "org.apache.hive:hive-service-rpc": { "locked": "2.3.7", "transitive": [ - "org.apache.hive:hive-serde" + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-serde", + "org.apache.hive:hive-service" ] }, "org.apache.hive:hive-shims": { "locked": "2.3.7", "transitive": [ + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde" ] }, @@ -6662,26 +10149,41 @@ "org.apache.hive:hive-common" ] }, + "org.apache.hive:hive-vector-code-gen": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.htrace:htrace-core": { "locked": "3.1.0-incubating", "transitive": [ "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + 
"org.apache.hbase:hbase-server" ] }, "org.apache.httpcomponents:httpclient": { - "locked": "4.4.1", + "locked": "4.5.2", "transitive": [ "net.java.dev.jets3t:jets3t", + "org.apache.calcite.avatica:avatica", "org.apache.hadoop:hadoop-auth", + "org.apache.hive:hive-jdbc", + "org.apache.slider:slider-core", "org.apache.thrift:libthrift" ] }, "org.apache.httpcomponents:httpcore": { - "locked": "4.4.1", + "locked": "4.4.4", "transitive": [ "net.java.dev.jets3t:jets3t", + "org.apache.calcite.avatica:avatica", + "org.apache.hive:hive-jdbc", "org.apache.httpcomponents:httpclient", + "org.apache.slider:slider-core", "org.apache.thrift:libthrift" ] }, @@ -6724,6 +10226,12 @@ "org.apache.iceberg:iceberg-parquet": { "project": true }, + "org.apache.ivy:ivy": { + "locked": "2.4.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.logging.log4j:log4j-1.2-api": { "locked": "2.6.2", "transitive": [ @@ -6763,6 +10271,7 @@ "locked": "1.6.3", "transitive": [ "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server", "org.apache.iceberg:iceberg-orc" ] }, @@ -6824,21 +10333,139 @@ "org.apache.parquet:parquet-hadoop" ] }, + "org.apache.slider:slider-core": { + "locked": "0.90.2-incubating", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.tez:hadoop-shim": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-internals" + ] + }, + "org.apache.tez:tez-api": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.tez:tez-common": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.tez:tez-dag": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.tez:tez-mapreduce": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.tez:tez-runtime-internals": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-dag" + ] + }, + "org.apache.tez:tez-runtime-library": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce" + ] + }, "org.apache.thrift:libfb303": { "locked": "0.9.3", "transitive": [ + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, "org.apache.thrift:libthrift": { "locked": "0.9.3", "transitive": [ + "co.cask.tephra:tephra-core", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", "org.apache.thrift:libfb303" ] }, + "org.apache.twill:twill-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-common": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-api", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-core": { + "locked": "0.6.0-incubating", + "transitive": [ + 
"co.cask.tephra:tephra-core" + ] + }, + "org.apache.twill:twill-discovery-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.twill:twill-discovery-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core" + ] + }, + "org.apache.twill:twill-zookeeper": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.velocity:velocity": { + "locked": "1.5", + "transitive": [ + "org.apache.hive:hive-vector-code-gen" + ] + }, "org.apache.yetus:audience-annotations": { "locked": "0.11.0", "transitive": [ @@ -6848,6 +10475,7 @@ "org.apache.zookeeper:zookeeper": { "locked": "3.4.6", "transitive": [ + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", @@ -6855,7 +10483,24 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.slider:slider-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apiguardian:apiguardian-api": { + "locked": "1.1.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.jupiter:junit-jupiter-params", + "org.junit.platform:junit-platform-commons", + "org.junit.platform:junit-platform-engine" ] }, "org.checkerframework:checker-qual": { @@ -6864,6 +10509,12 @@ "com.github.ben-manes.caffeine:caffeine" ] }, + "org.codehaus.groovy:groovy-all": { + "locked": "2.4.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.codehaus.jackson:jackson-core-asl": { "locked": "1.9.13", "transitive": [ @@ -6871,6 +10522,9 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-mapper-asl", "org.codehaus.jackson:jackson-xc" @@ -6880,7 +10534,9 @@ "locked": "1.9.13", "transitive": [ "com.sun.jersey:jersey-json", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" ] }, "org.codehaus.jackson:jackson-mapper-asl": { @@ -6890,15 +10546,35 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.slider:slider-core", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-xc" ] }, - "org.codehaus.jackson:jackson-xc": { - "locked": "1.9.13", + "org.codehaus.jackson:jackson-xc": { + "locked": "1.9.13", + "transitive": [ + "com.sun.jersey:jersey-json", + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.slider:slider-core" + ] 
+ }, + "org.codehaus.janino:commons-compiler": { + "locked": "2.7.6", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.codehaus.janino:janino" + ] + }, + "org.codehaus.janino:janino": { + "locked": "2.7.6", "transitive": [ - "com.sun.jersey:jersey-json", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.calcite:calcite-core" ] }, "org.codehaus.jettison:jettison": { @@ -6910,10 +10586,36 @@ "org.apache.hadoop:hadoop-yarn-server-resourcemanager" ] }, + "org.datanucleus:datanucleus-api-jdo": { + "locked": "4.2.4", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-core": { + "locked": "4.1.17", + "transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-rdbms": { + "locked": "4.1.19", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:javax.jdo": { + "locked": "3.2.0-m3", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.eclipse.jetty.aggregate:jetty-all": { "locked": "7.6.0.v20120127", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-service" ] }, "org.eclipse.jetty.orbit:javax.servlet": { @@ -6940,29 +10642,124 @@ "org.mockito:mockito-core" ] }, + "org.jamon:jamon-runtime": { + "locked": "2.3.1", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service" + ] + }, "org.jetbrains:annotations": { "locked": "17.0.0", "transitive": [ "org.apache.orc:orc-core" ] }, + "org.jruby.jcodings:jcodings": { + "locked": "1.0.8", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.jruby.joni:joni" + ] + }, + "org.jruby.joni:joni": { + "locked": "2.1.2", + "transitive": [ + "org.apache.hbase:hbase-client" + ] + }, + "org.junit.jupiter:junit-jupiter": { + "locked": "5.6.0", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.junit.jupiter:junit-jupiter-api": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter", + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.jupiter:junit-jupiter-params" + ] + }, + "org.junit.jupiter:junit-jupiter-engine": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.jupiter:junit-jupiter-params": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.platform:junit-platform-commons": { + "locked": "1.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.platform:junit-platform-engine" + ] + }, + "org.junit.platform:junit-platform-engine": { + "locked": "1.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-engine" + ] + }, "org.mockito:mockito-core": { "locked": "1.10.19" }, "org.objenesis:objenesis": { "locked": "2.1", "transitive": [ + "com.esotericsoftware.kryo:kryo", "org.mockito:mockito-core" ] }, + "org.opentest4j:opentest4j": { + "locked": "1.2.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.platform:junit-platform-engine" + ] + }, + "org.ow2.asm:asm-all": { + "locked": "5.0.2", + "transitive": [ + "org.apache.twill:twill-core" + ] + }, + "org.reflections:reflections": { + "locked": "0.9.8", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.roaringbitmap:RoaringBitmap": { + "locked": "0.4.9", + "transitive": [ + "org.apache.tez:tez-runtime-library" + ] + }, "org.slf4j:slf4j-api": { "locked": "1.7.25", "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", 
"com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", + "com.jolbox:bonecp", + "com.ning:async-http-client", + "com.yammer.metrics:metrics-core", + "com.zaxxer:HikariCP", "io.dropwizard.metrics:metrics-core", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", + "org.apache.calcite.avatica:avatica-metrics", + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid", "org.apache.curator:curator-client", "org.apache.directory.api:api-asn1-api", "org.apache.directory.api:api-util", @@ -6978,14 +10775,27 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client", "org.apache.hive.shims:hive-shims-0.23", "org.apache.hive.shims:hive-shims-common", "org.apache.hive.shims:hive-shims-scheduler", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", "org.apache.hive:hive-shims", "org.apache.hive:hive-storage-api", + "org.apache.hive:hive-vector-code-gen", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -6997,7 +10807,16 @@ "org.apache.orc:orc-shims", "org.apache.parquet:parquet-common", "org.apache.parquet:parquet-format-structures", + "org.apache.slider:slider-core", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-library", "org.apache.thrift:libthrift", + "org.apache.twill:twill-common", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", "org.apache.zookeeper:zookeeper", "org.slf4j:slf4j-simple" ] @@ -7023,15 +10842,32 @@ "org.apache.parquet:parquet-hadoop" ] }, + "oro:oro": { + "locked": "2.0.8", + "transitive": [ + "org.apache.velocity:velocity" + ] + }, + "stax:stax-api": { + "locked": "1.0.1", + "transitive": [ + "org.apache.hive:hive-exec", + "org.codehaus.jettison:jettison" + ] + }, "tomcat:jasper-compiler": { "locked": "5.5.23", "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, "tomcat:jasper-runtime": { "locked": "5.5.23", "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, @@ -7044,6 +10880,7 @@ "xml-apis:xml-apis": { "locked": "1.3.04", "transitive": [ + "dom4j:dom4j", "xerces:xercesImpl" ] }, @@ -7088,10 +10925,66 @@ "asm:asm-commons" ] }, + "ch.qos.logback:logback-classic": { + "locked": "1.0.9", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "ch.qos.logback:logback-core": { + "locked": "1.0.9", + "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "co.cask.tephra:tephra-api": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-core", + 
"co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-core": { + "locked": "0.6.0", + "transitive": [ + "co.cask.tephra:tephra-hbase-compat-1.0", + "org.apache.hive:hive-metastore" + ] + }, + "co.cask.tephra:tephra-hbase-compat-1.0": { + "locked": "0.6.0", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "com.beust:jcommander": { + "locked": "1.30", + "transitive": [ + "org.apache.slider:slider-core" + ] + }, + "com.esotericsoftware.kryo:kryo": { + "locked": "2.24.0", + "requested": "2.24.0" + }, + "com.esotericsoftware.minlog:minlog": { + "locked": "1.2", + "transitive": [ + "com.esotericsoftware.kryo:kryo" + ] + }, "com.fasterxml.jackson.core:jackson-annotations": { "locked": "2.10.2", + "requested": "2.6.5", "transitive": [ - "com.fasterxml.jackson.core:jackson-databind" + "com.fasterxml.jackson.core:jackson-databind", + "org.apache.calcite.avatica:avatica" ] }, "com.fasterxml.jackson.core:jackson-core": { @@ -7099,6 +10992,7 @@ "transitive": [ "com.fasterxml.jackson.core:jackson-databind", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", "org.apache.iceberg:iceberg-core" ] }, @@ -7107,6 +11001,7 @@ "transitive": [ "io.dropwizard.metrics:metrics-json", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", "org.apache.hive:hive-common", "org.apache.iceberg:iceberg-core" ] @@ -7126,6 +11021,15 @@ "com.github.stephenc.findbugs:findbugs-annotations": { "locked": "1.3.9-1", "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -7137,14 +11041,26 @@ "com.google.code.findbugs:jsr305": { "locked": "3.0.0", "transitive": [ + "org.apache.calcite:calcite-core", "org.apache.hadoop:hadoop-common", - "org.apache.hive:hive-serde" + "org.apache.hive:hive-serde", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-internals", + "org.apache.twill:twill-api", + "org.apache.twill:twill-common", + "org.apache.twill:twill-zookeeper" ] }, "com.google.code.gson:gson": { "locked": "2.2.4", "transitive": [ - "org.apache.hadoop:hadoop-common" + "co.cask.tephra:tephra-core", + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" ] }, "com.google.errorprone:error_prone_annotations": { @@ -7154,8 +11070,14 @@ ] }, "com.google.guava:guava": { - "locked": "16.0.1", - "transitive": [ + "locked": "18.0", + "transitive": [ + "co.cask.tephra:tephra-core", + "com.jolbox:bonecp", + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid", + "org.apache.calcite:calcite-linq4j", + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", @@ -7169,21 +11091,51 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", "org.apache.hadoop:hadoop-yarn-server-web-proxy", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + 
"org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-vector-code-gen", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", + "org.reflections:reflections" + ] + }, + "com.google.inject.extensions:guice-assistedinject": { + "locked": "3.0", + "transitive": [ + "co.cask.tephra:tephra-core" ] }, "com.google.inject.extensions:guice-servlet": { "locked": "3.0", "transitive": [ "com.sun.jersey.contribs:jersey-guice", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.slider:slider-core" ] }, "com.google.inject:guice": { "locked": "3.0", "transitive": [ + "co.cask.tephra:tephra-core", + "com.google.inject.extensions:guice-assistedinject", "com.google.inject.extensions:guice-servlet", "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-yarn-common", @@ -7193,8 +11145,9 @@ ] }, "com.google.protobuf:protobuf-java": { - "locked": "2.5.0", + "locked": "3.0.0-beta-1", "transitive": [ + "org.apache.calcite.avatica:avatica", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-mapreduce-client-app", @@ -7208,7 +11161,19 @@ "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.orc:orc-core" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-metastore", + "org.apache.orc:orc-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "com.jamesmurty.utils:java-xmlbuilder": { @@ -7223,6 +11188,28 @@ "org.apache.hadoop:hadoop-common" ] }, + "com.jolbox:bonecp": { + "locked": "0.8.0.RELEASE", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "com.klarna:hiverunner": { + "locked": "5.2.1", + "requested": "5.2.1" + }, + "com.lmax:disruptor": { + "locked": "3.3.0", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "com.ning:async-http-client": { + "locked": "1.8.16", + "transitive": [ + "org.apache.tez:tez-runtime-library" + ] + }, "com.sun.jersey.contribs:jersey-guice": { "locked": "1.9", "transitive": [ @@ -7238,7 +11225,9 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "com.sun.jersey:jersey-core": { @@ -7248,10 +11237,12 @@ 
"com.sun.jersey:jersey-json", "com.sun.jersey:jersey-server", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-server" ] }, "com.sun.jersey:jersey-json": { @@ -7261,7 +11252,9 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-nodemanager", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "com.sun.jersey:jersey-server": { @@ -7269,7 +11262,10 @@ "transitive": [ "com.sun.jersey.contribs:jersey-guice", "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" ] }, "com.sun.xml.bind:jaxb-impl": { @@ -7281,7 +11277,21 @@ "com.tdunning:json": { "locked": "1.8", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server" + ] + }, + "com.yammer.metrics:metrics-core": { + "locked": "2.2.0", + "transitive": [ + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server" + ] + }, + "com.zaxxer:HikariCP": { + "locked": "2.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" ] }, "commons-beanutils:commons-beanutils": { @@ -7303,8 +11313,13 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", - "org.apache.hive:hive-service-rpc" + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-service", + "org.apache.hive:hive-service-rpc", + "org.apache.tez:tez-dag" ] }, "commons-codec:commons-codec": { @@ -7317,9 +11332,17 @@ "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-llap-server", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", - "org.apache.httpcomponents:httpclient" + "org.apache.httpcomponents:httpclient", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-library" ] }, "commons-collections:commons-collections": { @@ -7327,7 +11350,10 @@ "transitive": [ "commons-configuration:commons-configuration", "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice" + "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.tez:tez-mapreduce" ] }, "commons-configuration:commons-configuration": { @@ -7336,6 +11362,19 @@ "org.apache.hadoop:hadoop-common" ] }, + "commons-daemon:commons-daemon": { + "locked": "1.0.13", + "transitive": [ + "org.apache.hadoop:hadoop-hdfs" + ] + }, + "commons-dbcp:commons-dbcp": { + "locked": "1.4", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.apache.hive:hive-metastore" + ] + }, 
"commons-digester:commons-digester": { "locked": "1.8", "transitive": [ @@ -7351,7 +11390,10 @@ "commons-httpclient:commons-httpclient": { "locked": "3.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core" ] }, "commons-io:commons-io": { @@ -7360,7 +11402,13 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "commons-lang:commons-lang": { @@ -7374,12 +11422,27 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server", "org.apache.hive.shims:hive-shims-0.23", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-storage-api", - "org.apache.orc:orc-core" + "org.apache.hive:hive-vector-code-gen", + "org.apache.orc:orc-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-library", + "org.apache.velocity:velocity" ] }, "commons-logging:commons-logging": { @@ -7392,6 +11455,7 @@ "commons-el:commons-el", "commons-httpclient:commons-httpclient", "net.java.dev.jets3t:jets3t", + "net.sf.jpam:jpam", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-api", @@ -7402,7 +11466,16 @@ "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", "org.apache.hadoop:hadoop-yarn-server-web-proxy", - "org.apache.httpcomponents:httpclient" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.httpcomponents:httpclient", + "org.apache.slider:slider-core" ] }, "commons-net:commons-net": { @@ -7414,9 +11487,17 @@ "commons-pool:commons-pool": { "locked": "1.6", "transitive": [ + "commons-dbcp:commons-dbcp", + "org.apache.hive:hive-metastore", "org.apache.parquet:parquet-hadoop" ] }, + "dom4j:dom4j": { + "locked": "1.6.1", + "transitive": [ + "org.reflections:reflections" + ] + }, "io.airlift:aircompressor": { "locked": "0.15", "transitive": [ @@ -7426,6 +11507,7 @@ "io.dropwizard.metrics:metrics-core": { "locked": "3.1.2", "transitive": [ + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", @@ -7445,16 +11527,41 @@ ] }, "io.netty:netty": { - "locked": "3.7.0.Final", + "locked": "3.9.2.Final", "transitive": [ + "com.ning:async-http-client", "org.apache.hadoop:hadoop-hdfs", + 
"org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hive:hive-llap-server", "org.apache.zookeeper:zookeeper" ] }, "io.netty:netty-all": { "locked": "4.0.23.Final", "transitive": [ - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-server" + ] + }, + "it.unimi.dsi:fastutil": { + "locked": "6.5.6", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "jakarta.jms:jakarta.jms-api": { + "locked": "2.0.2", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions" + ] + }, + "javassist:javassist": { + "locked": "3.12.1.GA", + "transitive": [ + "org.reflections:reflections" ] }, "javax.activation:activation": { @@ -7477,6 +11584,12 @@ "com.sun.jersey.contribs:jersey-guice" ] }, + "javax.jdo:jdo-api": { + "locked": "3.0.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "javax.mail:mail": { "locked": "1.4.1", "transitive": [ @@ -7486,7 +11599,8 @@ "javax.servlet.jsp:jsp-api": { "locked": "2.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.slider:slider-core" ] }, "javax.servlet:jsp-api": { @@ -7500,11 +11614,28 @@ "transitive": [ "javax.servlet:jsp-api", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.slider:slider-core", + "org.apache.tez:tez-dag", + "org.eclipse.jetty.aggregate:jetty-all", "tomcat:jasper-runtime" ] }, + "javax.transaction:jta": { + "locked": "1.1", + "transitive": [ + "javax.jdo:jdo-api" + ] + }, + "javax.transaction:transaction-api": { + "locked": "1.1", + "transitive": [ + "org.datanucleus:javax.jdo" + ] + }, "javax.xml.bind:jaxb-api": { "locked": "2.2.11", "transitive": [ @@ -7516,9 +11647,16 @@ "org.apache.orc:orc-core" ] }, + "javolution:javolution": { + "locked": "5.5.1", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "jline:jline": { "locked": "2.12", "transitive": [ + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", "org.apache.zookeeper:zookeeper" ] @@ -7526,11 +11664,23 @@ "joda-time:joda-time": { "locked": "2.8.1", "transitive": [ + "org.apache.calcite:calcite-druid", "org.apache.hive:hive-common" ] }, "junit:junit": { - "locked": "4.12" + "locked": "4.12", + "transitive": [ + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server" + ] }, "log4j:log4j": { "locked": "1.2.17", @@ -7541,25 +11691,62 @@ "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hbase:hbase-annotations", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-protocol", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core", "org.apache.zookeeper:zookeeper" ] }, + "net.hydromatic:eigenbase-properties": { + "locked": "1.1.5", + "transitive": [ + 
"org.apache.calcite:calcite-core" + ] + }, "net.java.dev.jets3t:jets3t": { "locked": "0.9.0", "transitive": [ "org.apache.hadoop:hadoop-common" ] }, + "net.sf.jpam:jpam": { + "locked": "1.1", + "transitive": [ + "org.apache.hive:hive-service" + ] + }, "net.sf.opencsv:opencsv": { "locked": "2.3", "transitive": [ "org.apache.hive:hive-serde" ] }, + "org.antlr:ST4": { + "locked": "4.0.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.antlr:antlr-runtime": { + "locked": "3.5.2", + "transitive": [ + "org.antlr:ST4", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, "org.apache.ant:ant": { "locked": "1.9.1", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-vector-code-gen" ] }, "org.apache.ant:ant-launcher": { @@ -7572,8 +11759,51 @@ "locked": "1.9.2", "transitive": [ "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", + "org.apache.hive:hive-llap-server", "org.apache.hive:hive-serde", - "org.apache.iceberg:iceberg-core" + "org.apache.iceberg:iceberg-core", + "org.apache.slider:slider-core" + ] + }, + "org.apache.calcite.avatica:avatica": { + "locked": "1.8.0", + "transitive": [ + "org.apache.calcite:calcite-core" + ] + }, + "org.apache.calcite.avatica:avatica-metrics": { + "locked": "1.8.0", + "transitive": [ + "org.apache.calcite.avatica:avatica" + ] + }, + "org.apache.calcite:calcite-core": { + "locked": "1.10.0", + "transitive": [ + "org.apache.calcite:calcite-druid", + "org.apache.hive:hive-exec" + ] + }, + "org.apache.calcite:calcite-druid": { + "locked": "1.10.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, + "org.apache.calcite:calcite-linq4j": { + "locked": "1.10.0", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid" + ] + }, + "org.apache.commons:commons-collections4": { + "locked": "4.1", + "transitive": [ + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag" ] }, "org.apache.commons:commons-compress": { @@ -7582,26 +11812,49 @@ "org.apache.avro:avro", "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-common", - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.slider:slider-core" ] }, "org.apache.commons:commons-lang3": { - "locked": "3.1", + "locked": "3.2", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.calcite:calcite-core", + "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-tez" + ] + }, + "org.apache.commons:commons-math": { + "locked": "2.2", + "transitive": [ + "org.apache.hbase:hbase-hadoop-compat", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server" ] }, "org.apache.commons:commons-math3": { "locked": "3.1.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.tez:tez-dag" + ] + }, + "org.apache.curator:apache-curator": { + "locked": "2.7.1", + "transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-llap-client" ] }, "org.apache.curator:curator-client": { "locked": "2.7.1", "transitive": [ "org.apache.curator:curator-framework", - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.slider:slider-core" ] }, "org.apache.curator:curator-framework": { @@ -7609,13 +11862,27 @@ "transitive": [ 
"org.apache.curator:curator-recipes", "org.apache.hadoop:hadoop-auth", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-service", + "org.apache.slider:slider-core" ] }, "org.apache.curator:curator-recipes": { "locked": "2.7.1", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hive:hive-service", + "org.apache.slider:slider-core" + ] + }, + "org.apache.derby:derby": { + "locked": "10.10.2.0", + "transitive": [ + "org.apache.hive:hive-metastore" ] }, "org.apache.directory.api:api-asn1-api": { @@ -7665,33 +11932,77 @@ "transitive": [ "org.apache.hadoop:hadoop-client", "org.apache.hadoop:hadoop-common", + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-api", + "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-common", - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.hadoop:hadoop-archives": { + "locked": "2.7.2", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core" ] }, "org.apache.hadoop:hadoop-auth": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-common" + "org.apache.hadoop:hadoop-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.tez:tez-api", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-client": { - "locked": "2.7.3" + "locked": "2.7.3", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" + ] }, "org.apache.hadoop:hadoop-common": { "locked": "2.7.3", "transitive": [ "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-hdfs": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-client" + "org.apache.hadoop:hadoop-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.slider:slider-core", + "org.apache.tez:tez-api" ] }, "org.apache.hadoop:hadoop-mapreduce-client-app": { @@ -7705,14 +12016,22 @@ "transitive": [ "org.apache.hadoop:hadoop-mapreduce-client-app", "org.apache.hadoop:hadoop-mapreduce-client-jobclient", - 
"org.apache.hadoop:hadoop-mapreduce-client-shuffle" + "org.apache.hadoop:hadoop-mapreduce-client-shuffle", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-mapreduce-client-core": { "locked": "2.7.3", "transitive": [ "org.apache.hadoop:hadoop-client", - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-webhcat-java-client", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-mapreduce-client-jobclient": { @@ -7734,17 +12053,28 @@ "org.apache.hadoop:hadoop-client", "org.apache.hadoop:hadoop-yarn-client", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.hadoop:hadoop-yarn-server-web-proxy" + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" ] }, "org.apache.hadoop:hadoop-yarn-client": { "locked": "2.7.3", "transitive": [ - "org.apache.hadoop:hadoop-mapreduce-client-common" + "org.apache.hadoop:hadoop-mapreduce-client-common", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce" ] }, "org.apache.hadoop:hadoop-yarn-common": { @@ -7753,11 +12083,24 @@ "org.apache.hadoop:hadoop-mapreduce-client-common", "org.apache.hadoop:hadoop-mapreduce-client-core", "org.apache.hadoop:hadoop-yarn-client", + "org.apache.hadoop:hadoop-yarn-registry", "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.hadoop:hadoop-yarn-server-web-proxy" + "org.apache.hadoop:hadoop-yarn-server-web-proxy", + "org.apache.tez:tez-api", + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.hadoop:hadoop-yarn-registry": { + "locked": "2.7.1", + "transitive": [ + "org.apache.slider:slider-core" ] }, "org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice": { @@ -7792,7 +12135,98 @@ "org.apache.hadoop:hadoop-yarn-server-web-proxy": { "locked": "2.7.2", "transitive": [ - "org.apache.hadoop:hadoop-yarn-server-resourcemanager" + "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.tez:tez-dag" + ] + }, + "org.apache.hbase:hbase-annotations": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-protocol" + ] + }, + "org.apache.hbase:hbase-client": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore" + ] + }, + "org.apache.hbase:hbase-common": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + 
"org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-hadoop-compat": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-hadoop2-compat", + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-hadoop2-compat": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-prefix-tree", + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hbase:hbase-prefix-tree": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-procedure": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-protocol": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-procedure", + "org.apache.hbase:hbase-server" + ] + }, + "org.apache.hbase:hbase-server": { + "locked": "1.1.1", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive.hcatalog:hive-hcatalog-core": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client" + ] + }, + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-webhcat-java-client" + ] + }, + "org.apache.hive.hcatalog:hive-webhcat-java-client": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner" ] }, "org.apache.hive.shims:hive-shims-0.23": { @@ -7815,25 +12249,105 @@ "org.apache.hive:hive-shims" ] }, + "org.apache.hive:hive-cli": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core" + ] + }, "org.apache.hive:hive-common": { "locked": "2.3.7", "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez", "org.apache.hive:hive-serde" ] }, - "org.apache.hive:hive-serde": { + "org.apache.hive:hive-exec": { "locked": "2.3.7" }, + "org.apache.hive:hive-jdbc": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.hive:hive-llap-client": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez" + ] + }, + "org.apache.hive:hive-llap-common": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive:hive-llap-server": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-service" + ] + }, + "org.apache.hive:hive-llap-tez": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.hive:hive-metastore": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-service" + ] + }, + "org.apache.hive:hive-serde": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + 
"org.apache.hive:hive-metastore" + ] + }, + "org.apache.hive:hive-service": { + "locked": "2.3.7", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc" + ] + }, "org.apache.hive:hive-service-rpc": { "locked": "2.3.7", "transitive": [ - "org.apache.hive:hive-serde" + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-serde", + "org.apache.hive:hive-service" ] }, "org.apache.hive:hive-shims": { "locked": "2.3.7", "transitive": [ + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde" ] }, @@ -7843,26 +12357,41 @@ "org.apache.hive:hive-common" ] }, + "org.apache.hive:hive-vector-code-gen": { + "locked": "2.3.7", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.htrace:htrace-core": { "locked": "3.1.0-incubating", "transitive": [ "org.apache.hadoop:hadoop-common", - "org.apache.hadoop:hadoop-hdfs" + "org.apache.hadoop:hadoop-hdfs", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-common", + "org.apache.hbase:hbase-server" ] }, "org.apache.httpcomponents:httpclient": { - "locked": "4.4.1", + "locked": "4.5.2", "transitive": [ "net.java.dev.jets3t:jets3t", + "org.apache.calcite.avatica:avatica", "org.apache.hadoop:hadoop-auth", + "org.apache.hive:hive-jdbc", + "org.apache.slider:slider-core", "org.apache.thrift:libthrift" ] }, "org.apache.httpcomponents:httpcore": { - "locked": "4.4.1", + "locked": "4.4.4", "transitive": [ "net.java.dev.jets3t:jets3t", + "org.apache.calcite.avatica:avatica", + "org.apache.hive:hive-jdbc", "org.apache.httpcomponents:httpclient", + "org.apache.slider:slider-core", "org.apache.thrift:libthrift" ] }, @@ -7905,6 +12434,12 @@ "org.apache.iceberg:iceberg-parquet": { "project": true }, + "org.apache.ivy:ivy": { + "locked": "2.4.0", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.apache.logging.log4j:log4j-1.2-api": { "locked": "2.6.2", "transitive": [ @@ -7944,6 +12479,7 @@ "locked": "1.6.3", "transitive": [ "org.apache.hive:hive-common", + "org.apache.hive:hive-llap-server", "org.apache.iceberg:iceberg-orc" ] }, @@ -8005,21 +12541,139 @@ "org.apache.parquet:parquet-hadoop" ] }, + "org.apache.slider:slider-core": { + "locked": "0.90.2-incubating", + "transitive": [ + "org.apache.hive:hive-llap-server" + ] + }, + "org.apache.tez:hadoop-shim": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-runtime-internals" + ] + }, + "org.apache.tez:tez-api": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-common", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.tez:tez-common": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-internals", + "org.apache.tez:tez-runtime-library" + ] + }, + "org.apache.tez:tez-dag": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.tez:tez-mapreduce": { + "locked": "0.9.1", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.apache.tez:tez-runtime-internals": { + "locked": "0.9.1", + "transitive": [ + "org.apache.tez:tez-dag" + ] + }, + "org.apache.tez:tez-runtime-library": { + "locked": "0.9.1", + "transitive": [ + 
"org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce" + ] + }, "org.apache.thrift:libfb303": { "locked": "0.9.3", "transitive": [ + "org.apache.hive:hive-metastore", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, "org.apache.thrift:libthrift": { "locked": "0.9.3", "transitive": [ + "co.cask.tephra:tephra-core", "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-cli", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", "org.apache.thrift:libfb303" ] }, + "org.apache.twill:twill-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-common": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-api", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apache.twill:twill-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core" + ] + }, + "org.apache.twill:twill-discovery-api": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-api", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.twill:twill-discovery-core": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core" + ] + }, + "org.apache.twill:twill-zookeeper": { + "locked": "0.6.0-incubating", + "transitive": [ + "co.cask.tephra:tephra-core", + "org.apache.twill:twill-core", + "org.apache.twill:twill-discovery-core" + ] + }, + "org.apache.velocity:velocity": { + "locked": "1.5", + "transitive": [ + "org.apache.hive:hive-vector-code-gen" + ] + }, "org.apache.yetus:audience-annotations": { "locked": "0.11.0", "transitive": [ @@ -8029,6 +12683,7 @@ "org.apache.zookeeper:zookeeper": { "locked": "3.4.6", "transitive": [ + "org.apache.curator:apache-curator", "org.apache.curator:curator-client", "org.apache.curator:curator-framework", "org.apache.curator:curator-recipes", @@ -8036,7 +12691,24 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-yarn-server-common", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", - "org.apache.hive.shims:hive-shims-common" + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.shims:hive-shims-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.slider:slider-core", + "org.apache.twill:twill-zookeeper" + ] + }, + "org.apiguardian:apiguardian-api": { + "locked": "1.1.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.jupiter:junit-jupiter-params", + "org.junit.platform:junit-platform-commons", + "org.junit.platform:junit-platform-engine" ] }, "org.checkerframework:checker-qual": { @@ -8045,6 +12717,12 @@ "com.github.ben-manes.caffeine:caffeine" ] }, + "org.codehaus.groovy:groovy-all": { + "locked": "2.4.4", + "transitive": [ + "org.apache.hive:hive-exec" + ] + }, "org.codehaus.jackson:jackson-core-asl": { "locked": "1.9.13", "transitive": [ @@ -8052,6 +12730,9 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hbase:hbase-server", + 
"org.apache.slider:slider-core", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-mapper-asl", "org.codehaus.jackson:jackson-xc" @@ -8061,7 +12742,9 @@ "locked": "1.9.13", "transitive": [ "com.sun.jersey:jersey-json", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hbase:hbase-server", + "org.apache.slider:slider-core" ] }, "org.codehaus.jackson:jackson-mapper-asl": { @@ -8071,6 +12754,12 @@ "org.apache.hadoop:hadoop-common", "org.apache.hadoop:hadoop-hdfs", "org.apache.hadoop:hadoop-yarn-common", + "org.apache.hadoop:hadoop-yarn-registry", + "org.apache.hbase:hbase-client", + "org.apache.hbase:hbase-server", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.slider:slider-core", "org.codehaus.jackson:jackson-jaxrs", "org.codehaus.jackson:jackson-xc" ] @@ -8079,7 +12768,21 @@ "locked": "1.9.13", "transitive": [ "com.sun.jersey:jersey-json", - "org.apache.hadoop:hadoop-yarn-common" + "org.apache.hadoop:hadoop-yarn-common", + "org.apache.slider:slider-core" + ] + }, + "org.codehaus.janino:commons-compiler": { + "locked": "2.7.6", + "transitive": [ + "org.apache.calcite:calcite-core", + "org.codehaus.janino:janino" + ] + }, + "org.codehaus.janino:janino": { + "locked": "2.7.6", + "transitive": [ + "org.apache.calcite:calcite-core" ] }, "org.codehaus.jettison:jettison": { @@ -8091,10 +12794,36 @@ "org.apache.hadoop:hadoop-yarn-server-resourcemanager" ] }, + "org.datanucleus:datanucleus-api-jdo": { + "locked": "4.2.4", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-core": { + "locked": "4.1.17", + "transitive": [ + "org.apache.hive:hive-exec", + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:datanucleus-rdbms": { + "locked": "4.1.19", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, + "org.datanucleus:javax.jdo": { + "locked": "3.2.0-m3", + "transitive": [ + "org.apache.hive:hive-metastore" + ] + }, "org.eclipse.jetty.aggregate:jetty-all": { "locked": "7.6.0.v20120127", "transitive": [ - "org.apache.hive:hive-common" + "org.apache.hive:hive-common", + "org.apache.hive:hive-service" ] }, "org.eclipse.jetty.orbit:javax.servlet": { @@ -8121,29 +12850,124 @@ "org.mockito:mockito-core" ] }, + "org.jamon:jamon-runtime": { + "locked": "2.3.1", + "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service" + ] + }, "org.jetbrains:annotations": { "locked": "17.0.0", "transitive": [ "org.apache.orc:orc-core" ] }, + "org.jruby.jcodings:jcodings": { + "locked": "1.0.8", + "transitive": [ + "org.apache.hbase:hbase-client", + "org.jruby.joni:joni" + ] + }, + "org.jruby.joni:joni": { + "locked": "2.1.2", + "transitive": [ + "org.apache.hbase:hbase-client" + ] + }, + "org.junit.jupiter:junit-jupiter": { + "locked": "5.6.0", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.junit.jupiter:junit-jupiter-api": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter", + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.jupiter:junit-jupiter-params" + ] + }, + "org.junit.jupiter:junit-jupiter-engine": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.jupiter:junit-jupiter-params": { + "locked": "5.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.platform:junit-platform-commons": { + "locked": "1.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + 
"org.junit.platform:junit-platform-engine" + ] + }, + "org.junit.platform:junit-platform-engine": { + "locked": "1.6.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-engine" + ] + }, "org.mockito:mockito-core": { "locked": "1.10.19" }, "org.objenesis:objenesis": { "locked": "2.1", "transitive": [ + "com.esotericsoftware.kryo:kryo", "org.mockito:mockito-core" ] }, + "org.opentest4j:opentest4j": { + "locked": "1.2.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.platform:junit-platform-engine" + ] + }, + "org.ow2.asm:asm-all": { + "locked": "5.0.2", + "transitive": [ + "org.apache.twill:twill-core" + ] + }, + "org.reflections:reflections": { + "locked": "0.9.8", + "transitive": [ + "com.klarna:hiverunner" + ] + }, + "org.roaringbitmap:RoaringBitmap": { + "locked": "0.4.9", + "transitive": [ + "org.apache.tez:tez-runtime-library" + ] + }, "org.slf4j:slf4j-api": { "locked": "1.7.25", "transitive": [ + "ch.qos.logback:logback-classic", + "co.cask.tephra:tephra-core", "com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter", + "com.jolbox:bonecp", + "com.ning:async-http-client", + "com.yammer.metrics:metrics-core", + "com.zaxxer:HikariCP", "io.dropwizard.metrics:metrics-core", "io.dropwizard.metrics:metrics-json", "io.dropwizard.metrics:metrics-jvm", "org.apache.avro:avro", + "org.apache.calcite.avatica:avatica", + "org.apache.calcite.avatica:avatica-metrics", + "org.apache.calcite:calcite-core", + "org.apache.calcite:calcite-druid", "org.apache.curator:curator-client", "org.apache.directory.api:api-asn1-api", "org.apache.directory.api:api-util", @@ -8159,14 +12983,27 @@ "org.apache.hadoop:hadoop-yarn-common", "org.apache.hadoop:hadoop-yarn-server-nodemanager", "org.apache.hadoop:hadoop-yarn-server-resourcemanager", + "org.apache.hive.hcatalog:hive-hcatalog-core", + "org.apache.hive.hcatalog:hive-hcatalog-server-extensions", + "org.apache.hive.hcatalog:hive-webhcat-java-client", "org.apache.hive.shims:hive-shims-0.23", "org.apache.hive.shims:hive-shims-common", "org.apache.hive.shims:hive-shims-scheduler", + "org.apache.hive:hive-cli", "org.apache.hive:hive-common", + "org.apache.hive:hive-exec", + "org.apache.hive:hive-jdbc", + "org.apache.hive:hive-llap-client", + "org.apache.hive:hive-llap-common", + "org.apache.hive:hive-llap-server", + "org.apache.hive:hive-llap-tez", + "org.apache.hive:hive-metastore", "org.apache.hive:hive-serde", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc", "org.apache.hive:hive-shims", "org.apache.hive:hive-storage-api", + "org.apache.hive:hive-vector-code-gen", "org.apache.iceberg:iceberg-api", "org.apache.iceberg:iceberg-common", "org.apache.iceberg:iceberg-core", @@ -8178,7 +13015,16 @@ "org.apache.orc:orc-shims", "org.apache.parquet:parquet-common", "org.apache.parquet:parquet-format-structures", + "org.apache.slider:slider-core", + "org.apache.tez:hadoop-shim", + "org.apache.tez:tez-api", + "org.apache.tez:tez-dag", + "org.apache.tez:tez-mapreduce", + "org.apache.tez:tez-runtime-library", "org.apache.thrift:libthrift", + "org.apache.twill:twill-common", + "org.apache.twill:twill-core", + "org.apache.twill:twill-zookeeper", "org.apache.zookeeper:zookeeper", "org.slf4j:slf4j-simple" ] @@ -8204,15 +13050,32 @@ "org.apache.parquet:parquet-hadoop" ] }, + "oro:oro": { + "locked": "2.0.8", + "transitive": [ + "org.apache.velocity:velocity" + ] + }, + "stax:stax-api": { + "locked": "1.0.1", + "transitive": [ + "org.apache.hive:hive-exec", + "org.codehaus.jettison:jettison" + ] + }, 
"tomcat:jasper-compiler": { "locked": "5.5.23", "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, "tomcat:jasper-runtime": { "locked": "5.5.23", "transitive": [ + "org.apache.hbase:hbase-server", + "org.apache.hive:hive-service", "org.apache.hive:hive-service-rpc" ] }, @@ -8225,6 +13088,7 @@ "xml-apis:xml-apis": { "locked": "1.3.04", "transitive": [ + "dom4j:dom4j", "xerces:xercesImpl" ] }, diff --git a/mr/src/main/java/org/apache/iceberg/mr/IcebergRecordReader.java b/mr/src/main/java/org/apache/iceberg/mr/IcebergRecordReader.java new file mode 100644 index 000000000000..db37243f80d6 --- /dev/null +++ b/mr/src/main/java/org/apache/iceberg/mr/IcebergRecordReader.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iceberg.mr; + +import org.apache.hadoop.conf.Configuration; +import org.apache.iceberg.DataFile; +import org.apache.iceberg.FileScanTask; +import org.apache.iceberg.Schema; +import org.apache.iceberg.StructLike; +import org.apache.iceberg.avro.Avro; +import org.apache.iceberg.data.avro.DataReader; +import org.apache.iceberg.data.orc.GenericOrcReader; +import org.apache.iceberg.data.parquet.GenericParquetReaders; +import org.apache.iceberg.expressions.Evaluator; +import org.apache.iceberg.expressions.Expression; +import org.apache.iceberg.expressions.Expressions; +import org.apache.iceberg.hadoop.HadoopInputFile; +import org.apache.iceberg.io.CloseableIterable; +import org.apache.iceberg.io.InputFile; +import org.apache.iceberg.orc.ORC; +import org.apache.iceberg.parquet.Parquet; + +public class IcebergRecordReader { + + private boolean applyResidual; + private boolean caseSensitive; + private boolean reuseContainers; + + private void initialize(Configuration conf) { + this.applyResidual = !conf.getBoolean(InputFormatConfig.SKIP_RESIDUAL_FILTERING, false); + this.caseSensitive = conf.getBoolean(InputFormatConfig.CASE_SENSITIVE, true); + this.reuseContainers = conf.getBoolean(InputFormatConfig.REUSE_CONTAINERS, false); + } + + public CloseableIterable createReader(Configuration config, FileScanTask currentTask, Schema readSchema) { + initialize(config); + DataFile file = currentTask.file(); + // TODO we should make use of FileIO to create inputFile + InputFile inputFile = HadoopInputFile.fromLocation(file.path(), config); + switch (file.format()) { + case AVRO: + return newAvroIterable(inputFile, currentTask, readSchema); + case ORC: + return newOrcIterable(inputFile, currentTask, readSchema); + case PARQUET: + return newParquetIterable(inputFile, currentTask, readSchema); + default: + throw new UnsupportedOperationException( + String.format("Cannot read %s file: %s", file.format().name(), file.path())); + } + } 
+ + private CloseableIterable newAvroIterable(InputFile inputFile, FileScanTask task, Schema readSchema) { + Avro.ReadBuilder avroReadBuilder = Avro.read(inputFile).project(readSchema).split(task.start(), task.length()); + if (reuseContainers) { + avroReadBuilder.reuseContainers(); + } + avroReadBuilder.createReaderFunc(DataReader::create); + return applyResidualFiltering(avroReadBuilder.build(), task.residual(), readSchema); + } + + private CloseableIterable newParquetIterable(InputFile inputFile, FileScanTask task, Schema readSchema) { + Parquet.ReadBuilder parquetReadBuilder = Parquet + .read(inputFile) + .project(readSchema) + .filter(task.residual()) + .caseSensitive(caseSensitive) + .split(task.start(), task.length()); + if (reuseContainers) { + parquetReadBuilder.reuseContainers(); + } + + parquetReadBuilder.createReaderFunc(fileSchema -> GenericParquetReaders.buildReader(readSchema, fileSchema)); + + return applyResidualFiltering(parquetReadBuilder.build(), task.residual(), readSchema); + } + + private CloseableIterable newOrcIterable(InputFile inputFile, FileScanTask task, Schema readSchema) { + ORC.ReadBuilder orcReadBuilder = ORC + .read(inputFile) + .project(readSchema) + .caseSensitive(caseSensitive) + .split(task.start(), task.length()); + // ORC does not support reuse containers yet + orcReadBuilder.createReaderFunc(fileSchema -> GenericOrcReader.buildReader(readSchema, fileSchema)); + return applyResidualFiltering(orcReadBuilder.build(), task.residual(), readSchema); + } + + private CloseableIterable applyResidualFiltering(CloseableIterable iter, Expression residual, Schema readSchema) { + if (applyResidual && residual != null && residual != Expressions.alwaysTrue()) { + Evaluator filter = new Evaluator(readSchema.asStruct(), residual, caseSensitive); + return CloseableIterable.filter(iter, record -> filter.eval((StructLike) record)); + } else { + return iter; + } + } + +} diff --git a/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergFilterFactory.java b/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergFilterFactory.java new file mode 100644 index 000000000000..ada7b78fa94a --- /dev/null +++ b/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergFilterFactory.java @@ -0,0 +1,158 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.apache.iceberg.mr.mapred;
+
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.List;
+import org.apache.hadoop.hive.ql.io.sarg.ExpressionTree;
+import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
+import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.iceberg.expressions.Expression;
+import org.apache.iceberg.expressions.Expressions;
+
+import static org.apache.iceberg.expressions.Expressions.and;
+import static org.apache.iceberg.expressions.Expressions.equal;
+import static org.apache.iceberg.expressions.Expressions.greaterThanOrEqual;
+import static org.apache.iceberg.expressions.Expressions.in;
+import static org.apache.iceberg.expressions.Expressions.isNull;
+import static org.apache.iceberg.expressions.Expressions.lessThan;
+import static org.apache.iceberg.expressions.Expressions.lessThanOrEqual;
+import static org.apache.iceberg.expressions.Expressions.not;
+import static org.apache.iceberg.expressions.Expressions.notNull;
+import static org.apache.iceberg.expressions.Expressions.or;
+
+public class IcebergFilterFactory {
+
+  private IcebergFilterFactory() {}
+
+  public static Expression generateFilterExpression(SearchArgument sarg) {
+    return translate(sarg.getExpression(), sarg.getLeaves());
+  }
+
+  /**
+   * Recursive method to traverse down the ExpressionTree to evaluate each expression and its leaf nodes.
+   * @param tree Current ExpressionTree where the 'top' node is being evaluated.
+   * @param leaves List of all leaf nodes within the tree.
+   * @return Expression that is translated from the Hive SearchArgument.
+   */
+  private static Expression translate(ExpressionTree tree, List<PredicateLeaf> leaves) {
+    List<ExpressionTree> childNodes = tree.getChildren();
+    switch (tree.getOperator()) {
+      case OR:
+        Expression orResult = Expressions.alwaysFalse();
+        for (ExpressionTree child : childNodes) {
+          orResult = or(orResult, translate(child, leaves));
+        }
+        return orResult;
+      case AND:
+        Expression result = Expressions.alwaysTrue();
+        for (ExpressionTree child : childNodes) {
+          result = and(result, translate(child, leaves));
+        }
+        return result;
+      case NOT:
+        return not(translate(tree.getChildren().get(0), leaves));
+      case LEAF:
+        return translateLeaf(leaves.get(tree.getLeaf()));
+      case CONSTANT:
+        //We are unsure of how the CONSTANT case works, so using the approach of:
+        //https://github.com/apache/hive/blob/master/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/
+        // ParquetFilterPredicateConverter.java#L116
+        throw new UnsupportedOperationException("CONSTANT operator is not supported");
+      default:
+        throw new IllegalStateException("Unknown operator: " + tree.getOperator());
+    }
+  }
+
+  /**
+   * Translate leaf nodes from Hive operator to Iceberg operator.
+   * @param leaf Leaf node
+   * @return Expression fully translated from Hive PredicateLeaf
+   */
+  private static Expression translateLeaf(PredicateLeaf leaf) {
+    String column = leaf.getColumnName();
+    switch (leaf.getOperator()) {
+      case EQUALS:
+        return equal(column, leafToIcebergType(leaf));
+      case NULL_SAFE_EQUALS:
+        return equal(notNull(column).ref().name(), leafToIcebergType(leaf)); //TODO: Unsure..
+ case LESS_THAN: + return lessThan(column, leafToIcebergType(leaf)); + case LESS_THAN_EQUALS: + return lessThanOrEqual(column, leafToIcebergType(leaf)); + case IN: + return in(column, (List) leafToIcebergType(leaf)); + case BETWEEN: + List icebergLiterals = leaf.getLiteralList(); + return and(greaterThanOrEqual(column, icebergLiterals.get(0)), + lessThanOrEqual(column, icebergLiterals.get(1))); + case IS_NULL: + return isNull(column); + default: + throw new IllegalStateException("Unknown operator: " + leaf.getOperator()); + } + } + + private static Object leafToIcebergType(PredicateLeaf leaf) { + switch (leaf.getType()) { + case LONG: + return leaf.getLiteral() != null ? leaf.getLiteral() : leaf.getLiteralList(); + case FLOAT: + return leaf.getLiteral() != null ? leaf.getLiteral() : leaf.getLiteralList(); + case STRING: + return leaf.getLiteral() != null ? leaf.getLiteral() : leaf.getLiteralList(); + case DATE: + //Hive converts a Date type to a Timestamp internally when retrieving literal + if (leaf.getLiteral() != null) { + return ((Timestamp) leaf.getLiteral()).toLocalDateTime().toLocalDate().toEpochDay(); + } else { + //But not when retrieving the literalList + List icebergValues = leaf.getLiteralList(); + icebergValues.replaceAll(value -> ((Date) value).toLocalDate().toEpochDay()); + return icebergValues; + } + case DECIMAL: + if (leaf.getLiteral() != null) { + return BigDecimal.valueOf(((HiveDecimalWritable) leaf.getLiteral()).doubleValue()); + } else { + List icebergValues = leaf.getLiteralList(); + icebergValues.replaceAll(value -> BigDecimal.valueOf(((HiveDecimalWritable) value).doubleValue())); + return icebergValues; + } + case TIMESTAMP: + if (leaf.getLiteral() != null) { + Timestamp timestamp = (Timestamp) leaf.getLiteral(); + return timestamp.toInstant().getEpochSecond() * 1000000 + timestamp.getNanos() / 1000; + } else { + List icebergValues = leaf.getLiteralList(); + icebergValues.replaceAll(value -> ( + (Timestamp) value).toInstant().getEpochSecond() * 1000000 + ((Timestamp) value).getNanos() / 1000); + return icebergValues; + } + case BOOLEAN: + return leaf.getLiteral() != null ? leaf.getLiteral() : leaf.getLiteralList(); + default: + throw new IllegalStateException("Unknown type: " + leaf.getType()); + } + } +} diff --git a/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergInputFormat.java b/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergInputFormat.java new file mode 100644 index 000000000000..d451b497870c --- /dev/null +++ b/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergInputFormat.java @@ -0,0 +1,254 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.iceberg.mr.mapred; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.Iterator; +import java.util.List; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.ql.exec.SerializationUtilities; +import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; +import org.apache.hadoop.hive.ql.io.sarg.ConvertAstToSearchArg; +import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; +import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; +import org.apache.hadoop.hive.ql.plan.TableScanDesc; +import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; +import org.apache.hadoop.mapred.FileSplit; +import org.apache.hadoop.mapred.InputFormat; +import org.apache.hadoop.mapred.InputSplit; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.RecordReader; +import org.apache.hadoop.mapred.Reporter; +import org.apache.iceberg.CombinedScanTask; +import org.apache.iceberg.FileScanTask; +import org.apache.iceberg.Schema; +import org.apache.iceberg.Table; +import org.apache.iceberg.data.Record; +import org.apache.iceberg.expressions.Expression; +import org.apache.iceberg.io.CloseableIterable; +import org.apache.iceberg.mr.InputFormatConfig; +import org.apache.iceberg.mr.SerializationUtil; +import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * CombineHiveInputFormat.AvoidSplitCombination is implemented to correctly delegate InputSplit + * creation to this class. See: https://stackoverflow.com/questions/29133275/ + * custom-inputformat-getsplits-never-called-in-hive + */ +public class IcebergInputFormat implements InputFormat, CombineHiveInputFormat.AvoidSplitCombination { + private static final Logger LOG = LoggerFactory.getLogger(IcebergInputFormat.class); + + private Table table; + + @Override + public InputSplit[] getSplits(JobConf conf, int numSplits) throws IOException { + table = TableResolver.resolveTableFromConfiguration(conf); + String location = conf.get(InputFormatConfig.TABLE_LOCATION); + List tasks = planTasks(conf); + return createSplits(tasks, location); + } + + private List planTasks(JobConf conf) { + String[] readColumns = ColumnProjectionUtils.getReadColumnNames(conf); + List tasks; + if (conf.get(TableScanDesc.FILTER_EXPR_CONF_STR) == null) { + tasks = Lists.newArrayList(table + .newScan() + .select(readColumns) + .planTasks()); + } else { + ExprNodeGenericFuncDesc exprNodeDesc = SerializationUtilities + .deserializeObject(conf.get(TableScanDesc.FILTER_EXPR_CONF_STR), ExprNodeGenericFuncDesc.class); + SearchArgument sarg = ConvertAstToSearchArg.create(conf, exprNodeDesc); + Expression filter = IcebergFilterFactory.generateFilterExpression(sarg); + + tasks = Lists.newArrayList(table + .newScan() + .select(readColumns) + .filter(filter) + .planTasks()); + } + return tasks; + } + + private InputSplit[] createSplits(List tasks, String name) { + InputSplit[] splits = new InputSplit[tasks.size()]; + for (int i = 0; i < tasks.size(); i++) { + splits[i] = new IcebergSplit(tasks.get(i), name); + } + return splits; + } + + @Override + public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException { + return new IcebergRecordReader(split, job); + } + + @Override + public boolean shouldSkipCombine(Path path, Configuration conf) throws IOException { + return true; + } + + public class IcebergRecordReader implements 
RecordReader<Void, IcebergWritable> {
+    private JobConf conf;
+    private IcebergSplit split;
+
+    private Iterator<FileScanTask> tasks;
+    private CloseableIterable<Record> reader;
+    private Iterator<Record> recordIterator;
+    private Record currentRecord;
+
+    public IcebergRecordReader(InputSplit split, JobConf conf) throws IOException {
+      this.split = (IcebergSplit) split;
+      this.conf = conf;
+      initialise();
+    }
+
+    private void initialise() {
+      tasks = split.getTask().files().iterator();
+      nextTask();
+    }
+
+    private void nextTask() {
+      FileScanTask currentTask = tasks.next();
+      Schema tableSchema = table.schema();
+      org.apache.iceberg.mr.IcebergRecordReader wrappedReader =
+          new org.apache.iceberg.mr.IcebergRecordReader();
+      reader = wrappedReader.createReader(conf, currentTask, tableSchema);
+      recordIterator = reader.iterator();
+    }
+
+    @Override
+    public boolean next(Void key, IcebergWritable value) {
+      if (recordIterator.hasNext()) {
+        currentRecord = recordIterator.next();
+        value.wrapRecord(currentRecord);
+        return true;
+      }
+
+      // Advance through the remaining file scan tasks; a task may yield no records, so keep going until a
+      // record is found or the tasks are exhausted.
+      while (tasks.hasNext()) {
+        nextTask();
+        if (recordIterator.hasNext()) {
+          currentRecord = recordIterator.next();
+          value.wrapRecord(currentRecord);
+          return true;
+        }
+      }
+      return false;
+    }
+
+    @Override
+    public Void createKey() {
+      return null;
+    }
+
+    @Override
+    public IcebergWritable createValue() {
+      IcebergWritable record = new IcebergWritable();
+      record.wrapRecord(currentRecord);
+      record.wrapSchema(table.schema());
+      return record;
+    }
+
+    @Override
+    public long getPos() throws IOException {
+      return 0;
+    }
+
+    @Override
+    public void close() throws IOException {
+      reader.close();
+    }
+
+    @Override
+    public float getProgress() throws IOException {
+      return 0;
+    }
+  }
+
+  /**
+   * FileSplit is extended rather than implementing the InputSplit interface directly because Hive's
+   * HiveInputFormat expects splits that are instances of FileSplit.
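+   * The split wraps a CombinedScanTask together with the table location; both are serialized with
+   * SerializationUtil in write() and restored in readFields(), which is how the planned task reaches the
+   * record readers on the task side.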
+ */ + private static class IcebergSplit extends FileSplit { + + private CombinedScanTask task; + private String partitionLocation; + + IcebergSplit() { + } + + IcebergSplit(CombinedScanTask task, String partitionLocation) { + this.task = task; + this.partitionLocation = partitionLocation; + } + + @Override + public long getLength() { + return task.files().stream().mapToLong(FileScanTask::length).sum(); + } + + @Override + public String[] getLocations() throws IOException { + return new String[0]; + } + + @Override + public Path getPath() { + return new Path(partitionLocation); + } + + @Override + public long getStart() { + return 0L; + } + + @Override + public void write(DataOutput out) throws IOException { + byte[] dataTask = SerializationUtil.serializeToBytes(this.task); + out.writeInt(dataTask.length); + out.write(dataTask); + + byte[] tableName = SerializationUtil.serializeToBytes(this.partitionLocation); + out.writeInt(tableName.length); + out.write(tableName); + } + + @Override + public void readFields(DataInput in) throws IOException { + byte[] data = new byte[in.readInt()]; + in.readFully(data); + this.task = SerializationUtil.deserializeFromBytes(data); + + byte[] name = new byte[in.readInt()]; + in.readFully(name); + this.partitionLocation = SerializationUtil.deserializeFromBytes(name); + } + + public CombinedScanTask getTask() { + return task; + } + } + +} diff --git a/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergWritable.java b/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergWritable.java index 1eb67f9d3158..df9a4e39fd2d 100644 --- a/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergWritable.java +++ b/mr/src/main/java/org/apache/iceberg/mr/mapred/IcebergWritable.java @@ -33,6 +33,9 @@ public class IcebergWritable implements Writable { private Record record; private Schema schema; + public IcebergWritable() { + } + public IcebergWritable(Record record, Schema schema) { this.record = record; this.schema = schema; diff --git a/mr/src/main/java/org/apache/iceberg/mr/mapreduce/IcebergInputFormat.java b/mr/src/main/java/org/apache/iceberg/mr/mapreduce/IcebergInputFormat.java index e8f76ab63cd2..9d138b93bdcd 100644 --- a/mr/src/main/java/org/apache/iceberg/mr/mapreduce/IcebergInputFormat.java +++ b/mr/src/main/java/org/apache/iceberg/mr/mapreduce/IcebergInputFormat.java @@ -22,13 +22,8 @@ import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import java.util.Collections; import java.util.Iterator; import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.BiFunction; -import java.util.function.Function; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.InputFormat; @@ -38,42 +33,21 @@ import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.iceberg.CombinedScanTask; -import org.apache.iceberg.DataFile; import org.apache.iceberg.FileScanTask; -import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.SchemaParser; -import org.apache.iceberg.StructLike; import org.apache.iceberg.Table; import org.apache.iceberg.TableProperties; import org.apache.iceberg.TableScan; -import org.apache.iceberg.avro.Avro; -import org.apache.iceberg.catalog.Catalog; -import org.apache.iceberg.catalog.TableIdentifier; -import org.apache.iceberg.common.DynConstructors; -import org.apache.iceberg.data.IdentityPartitionConverters; -import 
org.apache.iceberg.data.avro.DataReader; -import org.apache.iceberg.data.orc.GenericOrcReader; -import org.apache.iceberg.data.parquet.GenericParquetReaders; import org.apache.iceberg.exceptions.RuntimeIOException; -import org.apache.iceberg.expressions.Evaluator; import org.apache.iceberg.expressions.Expression; import org.apache.iceberg.expressions.Expressions; -import org.apache.iceberg.hadoop.HadoopInputFile; -import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.hadoop.Util; import org.apache.iceberg.io.CloseableIterable; import org.apache.iceberg.io.CloseableIterator; -import org.apache.iceberg.io.InputFile; import org.apache.iceberg.mr.InputFormatConfig; import org.apache.iceberg.mr.SerializationUtil; -import org.apache.iceberg.orc.ORC; -import org.apache.iceberg.parquet.Parquet; -import org.apache.iceberg.relocated.com.google.common.base.Preconditions; import org.apache.iceberg.relocated.com.google.common.collect.Lists; -import org.apache.iceberg.types.Type; -import org.apache.iceberg.types.TypeUtil; -import org.apache.iceberg.util.PartitionUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -106,9 +80,31 @@ public List getSplits(JobContext context) { } Configuration conf = context.getConfiguration(); - Table table = findTable(conf); - TableScan scan = table.newScan() - .caseSensitive(conf.getBoolean(InputFormatConfig.CASE_SENSITIVE, true)); + Table table = TableResolver.findTable(conf); + TableScan scan = createTableScan(conf, table); + + splits = Lists.newArrayList(); + boolean applyResidual = !conf.getBoolean(InputFormatConfig.SKIP_RESIDUAL_FILTERING, false); + InputFormatConfig.InMemoryDataModel model = conf.getEnum(InputFormatConfig.IN_MEMORY_DATA_MODEL, + InputFormatConfig.InMemoryDataModel.GENERIC); + try (CloseableIterable tasksIterable = scan.planTasks()) { + tasksIterable.forEach(task -> { + if (applyResidual && (model == InputFormatConfig.InMemoryDataModel.HIVE || + model == InputFormatConfig.InMemoryDataModel.PIG)) { + //TODO: We do not support residual evaluation for HIVE and PIG in memory data model yet + checkResiduals(task); + } + splits.add(new IcebergSplit(conf, task)); + }); + } catch (IOException e) { + throw new RuntimeIOException(e, "Failed to close table scan: %s", scan); + } + + return splits; + } + + private TableScan createTableScan(Configuration conf, Table table) { + TableScan scan = table.newScan().caseSensitive(conf.getBoolean(InputFormatConfig.CASE_SENSITIVE, true)); long snapshotId = conf.getLong(InputFormatConfig.SNAPSHOT_ID, -1); if (snapshotId != -1) { scan = scan.useSnapshot(snapshotId); @@ -131,25 +127,7 @@ public List getSplits(JobContext context) { if (filter != null) { scan = scan.filter(filter); } - - splits = Lists.newArrayList(); - boolean applyResidual = !conf.getBoolean(InputFormatConfig.SKIP_RESIDUAL_FILTERING, false); - InputFormatConfig.InMemoryDataModel model = conf.getEnum(InputFormatConfig.IN_MEMORY_DATA_MODEL, - InputFormatConfig.InMemoryDataModel.GENERIC); - try (CloseableIterable tasksIterable = scan.planTasks()) { - tasksIterable.forEach(task -> { - if (applyResidual && (model == InputFormatConfig.InMemoryDataModel.HIVE || - model == InputFormatConfig.InMemoryDataModel.PIG)) { - //TODO: We do not support residual evaluation for HIVE and PIG in memory data model yet - checkResiduals(task); - } - splits.add(new IcebergSplit(conf, task)); - }); - } catch (IOException e) { - throw new RuntimeIOException(e, "Failed to close table scan: %s", scan); - } - - return splits; + return scan; } private 
static void checkResiduals(CombinedScanTask task) { @@ -157,9 +135,9 @@ private static void checkResiduals(CombinedScanTask task) { Expression residual = fileScanTask.residual(); if (residual != null && !residual.equals(Expressions.alwaysTrue())) { throw new UnsupportedOperationException( - String.format( - "Filter expression %s is not completely satisfied. Additional rows " + - "can be returned not satisfied by the filter expression", residual)); + String.format( + "Filter expression %s is not completely satisfied. Additional rows " + + "can be returned not satisfied by the filter expression", residual)); } }); } @@ -240,141 +218,11 @@ public void close() throws IOException { } private CloseableIterable open(FileScanTask currentTask, Schema readSchema) { - DataFile file = currentTask.file(); - // TODO we should make use of FileIO to create inputFile - InputFile inputFile = HadoopInputFile.fromLocation(file.path(), context.getConfiguration()); - CloseableIterable iterable; - switch (file.format()) { - case AVRO: - iterable = newAvroIterable(inputFile, currentTask, readSchema); - break; - case ORC: - iterable = newOrcIterable(inputFile, currentTask, readSchema); - break; - case PARQUET: - iterable = newParquetIterable(inputFile, currentTask, readSchema); - break; - default: - throw new UnsupportedOperationException( - String.format("Cannot read %s file: %s", file.format().name(), file.path())); - } - + org.apache.iceberg.mr.IcebergRecordReader wrappedReader = new org.apache.iceberg.mr.IcebergRecordReader(); + CloseableIterable iterable = wrappedReader.createReader(context.getConfiguration(), currentTask, readSchema); return iterable; } - private CloseableIterable applyResidualFiltering(CloseableIterable iter, Expression residual, - Schema readSchema) { - boolean applyResidual = !context.getConfiguration().getBoolean(InputFormatConfig.SKIP_RESIDUAL_FILTERING, false); - - if (applyResidual && residual != null && residual != Expressions.alwaysTrue()) { - Evaluator filter = new Evaluator(readSchema.asStruct(), residual, caseSensitive); - return CloseableIterable.filter(iter, record -> filter.eval((StructLike) record)); - } else { - return iter; - } - } - - private CloseableIterable newAvroIterable( - InputFile inputFile, FileScanTask task, Schema readSchema) { - Avro.ReadBuilder avroReadBuilder = Avro.read(inputFile) - .project(readSchema) - .split(task.start(), task.length()); - if (reuseContainers) { - avroReadBuilder.reuseContainers(); - } - - switch (inMemoryDataModel) { - case PIG: - case HIVE: - //TODO implement value readers for Pig and Hive - throw new UnsupportedOperationException("Avro support not yet supported for Pig and Hive"); - case GENERIC: - avroReadBuilder.createReaderFunc( - (expIcebergSchema, expAvroSchema) -> - DataReader.create(expIcebergSchema, expAvroSchema, - constantsMap(task, IdentityPartitionConverters::convertConstant))); - } - return applyResidualFiltering(avroReadBuilder.build(), task.residual(), readSchema); - } - - private CloseableIterable newParquetIterable(InputFile inputFile, FileScanTask task, Schema readSchema) { - Parquet.ReadBuilder parquetReadBuilder = Parquet.read(inputFile) - .project(readSchema) - .filter(task.residual()) - .caseSensitive(caseSensitive) - .split(task.start(), task.length()); - if (reuseContainers) { - parquetReadBuilder.reuseContainers(); - } - - switch (inMemoryDataModel) { - case PIG: - case HIVE: - //TODO implement value readers for Pig and Hive - throw new UnsupportedOperationException("Parquet support not yet supported for Pig 
and Hive"); - case GENERIC: - parquetReadBuilder.createReaderFunc( - fileSchema -> GenericParquetReaders.buildReader( - readSchema, fileSchema, constantsMap(task, IdentityPartitionConverters::convertConstant))); - } - return applyResidualFiltering(parquetReadBuilder.build(), task.residual(), readSchema); - } - - private CloseableIterable newOrcIterable(InputFile inputFile, FileScanTask task, Schema readSchema) { - ORC.ReadBuilder orcReadBuilder = ORC.read(inputFile) - .project(readSchema) - .filter(task.residual()) - .caseSensitive(caseSensitive) - .split(task.start(), task.length()); - // ORC does not support reuse containers yet - switch (inMemoryDataModel) { - case PIG: - case HIVE: - //TODO: implement value readers for Pig and Hive - throw new UnsupportedOperationException("ORC support not yet supported for Pig and Hive"); - case GENERIC: - orcReadBuilder.createReaderFunc( - fileSchema -> GenericOrcReader.buildReader( - readSchema, fileSchema, constantsMap(task, IdentityPartitionConverters::convertConstant))); - } - - return applyResidualFiltering(orcReadBuilder.build(), task.residual(), readSchema); - } - - private Map constantsMap(FileScanTask task, BiFunction converter) { - PartitionSpec spec = task.spec(); - Set idColumns = spec.identitySourceIds(); - Schema partitionSchema = TypeUtil.select(expectedSchema, idColumns); - boolean projectsIdentityPartitionColumns = !partitionSchema.columns().isEmpty(); - if (projectsIdentityPartitionColumns) { - return PartitionUtil.constantsMap(task, converter); - } else { - return Collections.emptyMap(); - } - } - } - - private static Table findTable(Configuration conf) { - String path = conf.get(InputFormatConfig.TABLE_PATH); - Preconditions.checkArgument(path != null, "Table path should not be null"); - if (path.contains("/")) { - HadoopTables tables = new HadoopTables(conf); - return tables.load(path); - } - - String catalogFuncClass = conf.get(InputFormatConfig.CATALOG); - if (catalogFuncClass != null) { - Function catalogFunc = (Function) - DynConstructors.builder(Function.class) - .impl(catalogFuncClass) - .build() - .newInstance(); - Catalog catalog = catalogFunc.apply(conf); - TableIdentifier tableIdentifier = TableIdentifier.parse(path); - return catalog.loadTable(tableIdentifier); - } else { - throw new IllegalArgumentException("No custom catalog specified to load table " + path); - } } static class IcebergSplit extends InputSplit implements Writable { @@ -421,3 +269,4 @@ public void readFields(DataInput in) throws IOException { } } } + diff --git a/mr/src/main/java/org/apache/iceberg/mr/mapreduce/TableResolver.java b/mr/src/main/java/org/apache/iceberg/mr/mapreduce/TableResolver.java new file mode 100644 index 000000000000..7775e7ea9409 --- /dev/null +++ b/mr/src/main/java/org/apache/iceberg/mr/mapreduce/TableResolver.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iceberg.mr.mapreduce; + +import java.util.function.Function; +import org.apache.hadoop.conf.Configuration; +import org.apache.iceberg.Table; +import org.apache.iceberg.catalog.Catalog; +import org.apache.iceberg.catalog.TableIdentifier; +import org.apache.iceberg.common.DynConstructors; +import org.apache.iceberg.hadoop.HadoopTables; +import org.apache.iceberg.mr.InputFormatConfig; +import org.apache.iceberg.relocated.com.google.common.base.Preconditions; + +final class TableResolver { + + private TableResolver() { + } + + public static Table findTable(Configuration conf) { + String path = conf.get(InputFormatConfig.TABLE_PATH); + Preconditions.checkArgument(path != null, "Table path should not be null"); + if (path.contains("/")) { + HadoopTables tables = new HadoopTables(conf); + return tables.load(path); + } + + String catalogFuncClass = conf.get(InputFormatConfig.CATALOG); + if (catalogFuncClass != null) { + Function catalogFunc = (Function) + DynConstructors.builder(Function.class) + .impl(catalogFuncClass) + .build() + .newInstance(); + Catalog catalog = catalogFunc.apply(conf); + TableIdentifier tableIdentifier = TableIdentifier.parse(path); + return catalog.loadTable(tableIdentifier); + } else { + throw new IllegalArgumentException("No custom catalog specified to load table " + path); + } + } + +} diff --git a/mr/src/test/java/org/apache/iceberg/mr/TestHelpers.java b/mr/src/test/java/org/apache/iceberg/mr/TestHelpers.java new file mode 100644 index 000000000000..8db577f00ed4 --- /dev/null +++ b/mr/src/test/java/org/apache/iceberg/mr/TestHelpers.java @@ -0,0 +1,153 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.apache.iceberg.mr;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import org.apache.iceberg.DataFile;
+import org.apache.iceberg.DataFiles;
+import org.apache.iceberg.FileFormat;
+import org.apache.iceberg.Files;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.StructLike;
+import org.apache.iceberg.Table;
+import org.apache.iceberg.avro.Avro;
+import org.apache.iceberg.data.GenericRecord;
+import org.apache.iceberg.data.Record;
+import org.apache.iceberg.data.avro.DataWriter;
+import org.apache.iceberg.data.orc.GenericOrcWriter;
+import org.apache.iceberg.data.parquet.GenericParquetWriter;
+import org.apache.iceberg.io.FileAppender;
+import org.apache.iceberg.orc.ORC;
+import org.apache.iceberg.parquet.Parquet;
+import org.apache.iceberg.types.Types;
+
+import static org.apache.iceberg.types.Types.NestedField.optional;
+import static org.apache.iceberg.types.Types.NestedField.required;
+
+public class TestHelpers {
+
+  private TestHelpers() {}
+
+  /**
+   * Implements {@link StructLike#get} for passing data in tests.
+   */
+  public static class Row implements StructLike {
+    public static Row of(Object... values) {
+      return new Row(values);
+    }
+
+    private final Object[] values;
+
+    private Row(Object... values) {
+      this.values = values;
+    }
+
+    @Override
+    public int size() {
+      return values.length;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public <T> T get(int pos, Class<T> javaClass) {
+      return javaClass.cast(values[pos]);
+    }
+
+    @Override
+    public <T> void set(int pos, T value) {
+      throw new UnsupportedOperationException("Setting values is not supported");
+    }
+  }
+
+  public static DataFile writeFile(File targetFile,
+      Table table, StructLike partitionData, FileFormat fileFormat, List<Record> records) throws IOException {
+    if (targetFile.exists()) {
+      if (!targetFile.delete()) {
+        throw new IOException("Unable to delete " + targetFile.getAbsolutePath());
+      }
+    }
+    FileAppender<Record> appender;
+    switch (fileFormat) {
+      case AVRO:
+        appender = Avro.write(Files.localOutput(targetFile))
+            .schema(table.schema())
+            .createWriterFunc(DataWriter::create)
+            .named(fileFormat.name())
+            .build();
+        break;
+      case PARQUET:
+        appender = Parquet.write(Files.localOutput(targetFile))
+            .schema(table.schema())
+            .createWriterFunc(GenericParquetWriter::buildWriter)
+            .named(fileFormat.name())
+            .build();
+        break;
+      case ORC:
+        appender = ORC.write(Files.localOutput(targetFile))
+            .schema(table.schema())
+            .createWriterFunc(GenericOrcWriter::buildWriter)
+            .build();
+        break;
+      default:
+        throw new UnsupportedOperationException("Cannot write format: " + fileFormat);
+    }
+
+    try {
+      appender.addAll(records);
+    } finally {
+      appender.close();
+    }
+
+    DataFiles.Builder builder = DataFiles.builder(table.spec())
+        .withPath(targetFile.toString())
+        .withFormat(fileFormat)
+        .withFileSizeInBytes(targetFile.length())
+        .withMetrics(appender.metrics());
+    if (partitionData != null) {
+      builder.withPartition(partitionData);
+    }
+    return builder.build();
+  }
+
+  /**
+   * Based on: https://github.com/apache/incubator-iceberg/blob/master/
+   * spark/src/test/java/org/apache/iceberg/spark/source/SimpleRecord.java
+   */
+  public static Record createSimpleRecord(long id, String data) {
+    Schema schema = new Schema(required(1, "id", Types.LongType.get()),
+        optional(2, "data", Types.StringType.get()));
+    GenericRecord record = GenericRecord.create(schema);
+    record.setField("id", id);
+    record.setField("data", data);
+    return record;
+  }
+
+  public static Record
createCustomRecord(Schema schema, List dataValues) { + GenericRecord record = GenericRecord.create(schema); + List fields = schema.columns(); + for (int i = 0; i < fields.size(); i++) { + record.setField(fields.get(i).name(), dataValues.get(i)); + } + return record; + } + +} diff --git a/mr/src/test/java/org/apache/iceberg/mr/mapred/TestHiveIcebergInputFormat.java b/mr/src/test/java/org/apache/iceberg/mr/mapred/TestHiveIcebergInputFormat.java new file mode 100644 index 000000000000..80ec34ec0487 --- /dev/null +++ b/mr/src/test/java/org/apache/iceberg/mr/mapred/TestHiveIcebergInputFormat.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iceberg.mr.mapred; + +import com.klarna.hiverunner.HiveShell; +import com.klarna.hiverunner.StandaloneHiveRunner; +import com.klarna.hiverunner.annotations.HiveSQL; +import java.io.File; +import java.io.IOException; +import java.util.List; +import org.apache.iceberg.PartitionSpec; +import org.apache.iceberg.Schema; +import org.apache.iceberg.Table; +import org.apache.iceberg.hadoop.HadoopTables; +import org.apache.iceberg.types.Types; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; + +import static org.apache.iceberg.types.Types.NestedField.optional; +import static org.apache.iceberg.types.Types.NestedField.required; +import static org.junit.Assert.assertEquals; + +@RunWith(StandaloneHiveRunner.class) +public class TestHiveIcebergInputFormat { + + @HiveSQL(files = {}, autoStart = true) + private HiveShell shell; + + @Rule + public TemporaryFolder temp = new TemporaryFolder(); + + private File tableLocation; + + @Before + public void before() throws IOException { + tableLocation = temp.newFolder(); + Schema schema = new Schema(required(1, "id", Types.LongType.get()), + optional(2, "data", Types.StringType.get())); + PartitionSpec spec = PartitionSpec.unpartitioned(); + + HadoopTables tables = new HadoopTables(); + Table table = tables.create(schema, spec, tableLocation.getAbsolutePath()); + } + + @Test + public void emptyTable() { + shell.execute("CREATE DATABASE source_db"); + shell.execute(new StringBuilder() + .append("CREATE TABLE source_db.table_a ") + .append("(id INT, data STRING) ") + .append("STORED AS ") + .append("INPUTFORMAT 'org.apache.iceberg.mr.mapred.IcebergInputFormat' ") + .append("OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' ") + .append("LOCATION '") + .append(tableLocation.getAbsolutePath()) + .append("' TBLPROPERTIES ('iceberg.catalog'='hadoop.tables'") + .append(")") + .toString()); + + List result = shell.executeStatement("SELECT id, data FROM source_db.table_a"); + + assertEquals(0, result.size()); + } + + 
//TODO: when HiveSerde and StorageHandlers merged in, move over additional tests from Hiveberg +} diff --git a/mr/src/test/java/org/apache/iceberg/mr/mapred/TestIcebergFilterFactory.java b/mr/src/test/java/org/apache/iceberg/mr/mapred/TestIcebergFilterFactory.java new file mode 100644 index 000000000000..4c5338e13233 --- /dev/null +++ b/mr/src/test/java/org/apache/iceberg/mr/mapred/TestIcebergFilterFactory.java @@ -0,0 +1,327 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iceberg.mr.mapred; + +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.Timestamp; +import java.time.LocalDate; +import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf; +import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; +import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.iceberg.expressions.And; +import org.apache.iceberg.expressions.Expressions; +import org.apache.iceberg.expressions.Not; +import org.apache.iceberg.expressions.Or; +import org.apache.iceberg.expressions.UnboundPredicate; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class TestIcebergFilterFactory { + + @Test + public void testEqualsOperand() { + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = builder.startAnd().equals("salary", PredicateLeaf.Type.LONG, 3000L).end().build(); + + UnboundPredicate expected = Expressions.equal("salary", 3000L); + UnboundPredicate actual = (UnboundPredicate) IcebergFilterFactory.generateFilterExpression(arg); + + assertEquals(actual.op(), expected.op()); + assertEquals(actual.literal(), expected.literal()); + assertEquals(actual.ref().name(), expected.ref().name()); + } + + @Test + public void testNotEqualsOperand() { + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = builder.startNot().equals("salary", PredicateLeaf.Type.LONG, 3000L).end().build(); + + Not expected = (Not) Expressions.not(Expressions.equal("salary", 3000L)); + Not actual = (Not) IcebergFilterFactory.generateFilterExpression(arg); + + UnboundPredicate childExpressionActual = (UnboundPredicate) actual.child(); + UnboundPredicate childExpressionExpected = Expressions.equal("salary", 3000L); + + assertEquals(actual.op(), expected.op()); + assertEquals(actual.child().op(), expected.child().op()); + assertEquals(childExpressionActual.ref().name(), childExpressionExpected.ref().name()); + assertEquals(childExpressionActual.literal(), childExpressionExpected.literal()); + } + + @Test + public void testLessThanOperand() { + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = 
builder.startAnd().lessThan("salary", PredicateLeaf.Type.LONG, 3000L).end().build(); + + UnboundPredicate expected = Expressions.lessThan("salary", 3000L); + UnboundPredicate actual = (UnboundPredicate) IcebergFilterFactory.generateFilterExpression(arg); + + assertEquals(actual.op(), expected.op()); + assertEquals(actual.literal(), expected.literal()); + assertEquals(actual.ref().name(), expected.ref().name()); + } + + @Test + public void testLessThanEqualsOperand() { + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = builder.startAnd().lessThanEquals("salary", PredicateLeaf.Type.LONG, 3000L).end().build(); + + UnboundPredicate expected = Expressions.lessThanOrEqual("salary", 3000L); + UnboundPredicate actual = (UnboundPredicate) IcebergFilterFactory.generateFilterExpression(arg); + + assertEquals(actual.op(), expected.op()); + assertEquals(actual.literal(), expected.literal()); + assertEquals(actual.ref().name(), expected.ref().name()); + } + + @Test + public void testInOperandWithLong() { + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = builder.startAnd().in("salary", PredicateLeaf.Type.LONG, 3000L, 4000L).end().build(); + + UnboundPredicate expected = Expressions.in("salary", 3000L, 4000L); + UnboundPredicate actual = (UnboundPredicate) IcebergFilterFactory.generateFilterExpression(arg); + + assertEquals(actual.op(), expected.op()); + assertEquals(actual.literals(), expected.literals()); + assertEquals(actual.ref().name(), expected.ref().name()); + } + + @Test + public void testInOperandWithDecimal() { + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = builder.startAnd().in("decimal", PredicateLeaf.Type.DECIMAL, + new HiveDecimalWritable("12.14"), new HiveDecimalWritable("13.15")).end().build(); + + UnboundPredicate expected = Expressions.in("decimal", BigDecimal.valueOf(12.14), BigDecimal.valueOf(13.15)); + UnboundPredicate actual = (UnboundPredicate) IcebergFilterFactory.generateFilterExpression(arg); + + assertEquals(actual.op(), expected.op()); + assertEquals(actual.literals(), expected.literals()); + assertEquals(actual.ref().name(), expected.ref().name()); + } + + @Test + public void testInOperandWithDate() { + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = builder + .startAnd() + .in("date", PredicateLeaf.Type.DATE, + Date.valueOf("2020-06-15"), Date.valueOf("2021-06-15")) + .end() + .build(); + + UnboundPredicate expected = Expressions.in("date", LocalDate.of(2020, 6, 15).toEpochDay(), + LocalDate.of(2021, 6, 15).toEpochDay()); + UnboundPredicate actual = (UnboundPredicate) IcebergFilterFactory.generateFilterExpression(arg); + + assertEquals(actual.op(), expected.op()); + assertEquals(actual.literals(), expected.literals()); + assertEquals(actual.ref().name(), expected.ref().name()); + assertEquals(expected.toString(), actual.toString()); + } + + @Test + public void testInOperandWithTimestamp() { + Timestamp timestampHiveFilterOne = Timestamp.valueOf("2016-11-16 06:43:19.77"); + Timestamp timestampHiveFilterTwo = Timestamp.valueOf("2017-11-16 06:43:19.77"); + + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = builder + .startAnd() + .in("timestamp", PredicateLeaf.Type.TIMESTAMP, timestampHiveFilterOne, timestampHiveFilterTwo) + .end() + .build(); + + UnboundPredicate expected = Expressions.in("timestamp", + 1479278599770000L, 1510814599770000L); + UnboundPredicate 
actual = (UnboundPredicate) IcebergFilterFactory.generateFilterExpression(arg);
+
+    assertEquals(expected.op(), actual.op());
+    assertEquals(expected.literals(), actual.literals());
+    assertEquals(expected.ref().name(), actual.ref().name());
+    assertEquals(expected.toString(), actual.toString());
+  }
+
+  @Test
+  public void testBetweenOperand() {
+    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
+    SearchArgument arg = builder
+        .startAnd()
+        .between("salary", PredicateLeaf.Type.LONG, 3000L, 4000L).end().build();
+
+    And expected = (And) Expressions.and(Expressions.greaterThanOrEqual("salary", 3000L),
+        Expressions.lessThanOrEqual("salary", 4000L));
+    And actual = (And) IcebergFilterFactory.generateFilterExpression(arg);
+
+    assertEquals(actual.op(), expected.op());
+    assertEquals(actual.left().op(), expected.left().op());
+    assertEquals(actual.right().op(), expected.right().op());
+  }
+
+  @Test
+  public void testIsNullOperand() {
+    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
+    SearchArgument arg = builder.startAnd().isNull("salary", PredicateLeaf.Type.LONG).end().build();
+
+    UnboundPredicate expected = Expressions.isNull("salary");
+    UnboundPredicate actual = (UnboundPredicate) IcebergFilterFactory.generateFilterExpression(arg);
+
+    assertEquals(actual.op(), expected.op());
+    assertEquals(actual.ref().name(), expected.ref().name());
+  }
+
+  @Test
+  public void testAndOperand() {
+    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
+    SearchArgument arg = builder
+        .startAnd()
+        .equals("salary", PredicateLeaf.Type.LONG, 3000L)
+        .equals("salary", PredicateLeaf.Type.LONG, 4000L)
+        .end().build();
+
+    And expected = (And) Expressions
+        .and(Expressions.equal("salary", 3000L), Expressions.equal("salary", 4000L));
+    And actual = (And) IcebergFilterFactory.generateFilterExpression(arg);
+
+    assertEquals(actual.op(), expected.op());
+    assertEquals(actual.left().op(), expected.left().op());
+    assertEquals(actual.right().op(), expected.right().op());
+  }
+
+  @Test
+  public void testOrOperand() {
+    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
+    SearchArgument arg = builder
+        .startOr()
+        .equals("salary", PredicateLeaf.Type.LONG, 3000L)
+        .equals("salary", PredicateLeaf.Type.LONG, 4000L)
+        .end().build();
+
+    Or expected = (Or) Expressions
+        .or(Expressions.equal("salary", 3000L), Expressions.equal("salary", 4000L));
+    Or actual = (Or) IcebergFilterFactory.generateFilterExpression(arg);
+
+    assertEquals(actual.op(), expected.op());
+    assertEquals(actual.left().op(), expected.left().op());
+    assertEquals(actual.right().op(), expected.right().op());
+  }
+
+  @Test
+  public void testManyAndOperand() {
+    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
+    SearchArgument arg = builder
+        .startAnd()
+        .equals("salary", PredicateLeaf.Type.LONG, 3000L)
+        .equals("job", PredicateLeaf.Type.LONG, 4000L)
+        .equals("name", PredicateLeaf.Type.LONG, 9000L)
+        .end()
+        .build();
+
+    And expected = (And) Expressions.and(
+        Expressions.equal("salary", 3000L),
+        Expressions.equal("job", 4000L),
+        Expressions.equal("name", 9000L));
+
+    And actual = (And) IcebergFilterFactory.generateFilterExpression(arg);
+
+    assertEquals(actual.op(), expected.op());
+    assertEquals(actual.right().op(), expected.right().op());
+    assertEquals(actual.left().op(), expected.left().op());
+  }
+
+  @Test
+  public void testManyOrOperand() {
+    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
+    SearchArgument arg =
builder + .startOr() + .equals("salary", PredicateLeaf.Type.LONG, 3000L) + .equals("job", PredicateLeaf.Type.LONG, 4000L) + .equals("name", PredicateLeaf.Type.LONG, 9000L) + .end() + .build(); + + Or expected = (Or) Expressions.or(Expressions.or(Expressions.equal("salary", 3000L), + Expressions.equal("job", 4000L)), Expressions.equal("name", 9000L)); + + Or actual = (Or) IcebergFilterFactory.generateFilterExpression(arg); + + assertEquals(actual.op(), expected.op()); + assertEquals(actual.right().op(), expected.right().op()); + assertEquals(actual.left().op(), expected.left().op()); + } + + @Test + public void testNestedFilter() { + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = builder + .startOr() + .equals("job", PredicateLeaf.Type.STRING, "dev") + .startAnd() + .equals("id", PredicateLeaf.Type.LONG, 3L) + .equals("dept", PredicateLeaf.Type.STRING, "300") + .end() + .end() + .build(); + + And expected = (And) Expressions.and(Expressions.or(Expressions.equal("job", "dev"), Expressions.equal( + "id", 3L)), Expressions.or(Expressions.equal("job", "dev"), Expressions.equal("dept", "300"))); + And actual = (And) IcebergFilterFactory.generateFilterExpression(arg); + assertEquals(actual.op(), expected.op()); + assertEquals(actual.right().op(), expected.right().op()); + assertEquals(actual.left().op(), expected.left().op()); + } + + @Test + public void testTypeConversion() { + SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); + SearchArgument arg = builder + .startAnd() + .equals("date", PredicateLeaf.Type.DATE, Date.valueOf("2020-06-15")) + .equals("timestamp", PredicateLeaf.Type.TIMESTAMP, Timestamp.valueOf("2016-11-16 06:43:19.77")) + .equals("decimal", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("12.12")) + .equals("string", PredicateLeaf.Type.STRING, "hello world") + .equals("long", PredicateLeaf.Type.LONG, 3020L) + .equals("float", PredicateLeaf.Type.FLOAT, 4400D) + .equals("boolean", PredicateLeaf.Type.BOOLEAN, true) + .end() + .build(); + + Timestamp timestamp = Timestamp.valueOf("2016-11-16 06:43:19.77"); + And expected = (And) Expressions.and( + Expressions.equal("date", LocalDate.of(2020, 6, 15).toEpochDay()), + Expressions.equal("timestamp", 1479278599770000L), + Expressions.equal("decimal", BigDecimal.valueOf(12.12)), + Expressions.equal("string", "hello world"), + Expressions.equal("long", 3020L), + Expressions.equal("float", 4400D), + Expressions.equal("boolean", true)); + + And actual = (And) IcebergFilterFactory.generateFilterExpression(arg); + + assertEquals(expected.toString(), actual.toString()); + assertEquals(expected.op(), actual.op()); + + } +} diff --git a/mr/src/test/java/org/apache/iceberg/mr/mapred/TestIcebergInputFormat.java b/mr/src/test/java/org/apache/iceberg/mr/mapred/TestIcebergInputFormat.java new file mode 100644 index 000000000000..bc90fd3c35f8 --- /dev/null +++ b/mr/src/test/java/org/apache/iceberg/mr/mapred/TestIcebergInputFormat.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iceberg.mr.mapred; + +import com.klarna.hiverunner.HiveShell; +import com.klarna.hiverunner.StandaloneHiveRunner; +import com.klarna.hiverunner.annotations.HiveSQL; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import org.apache.hadoop.mapred.InputSplit; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.RecordReader; +import org.apache.iceberg.DataFile; +import org.apache.iceberg.FileFormat; +import org.apache.iceberg.PartitionSpec; +import org.apache.iceberg.Schema; +import org.apache.iceberg.Table; +import org.apache.iceberg.data.Record; +import org.apache.iceberg.hadoop.HadoopTables; +import org.apache.iceberg.mr.InputFormatConfig; +import org.apache.iceberg.mr.TestHelpers; +import org.apache.iceberg.types.Types; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; + +import static org.apache.iceberg.types.Types.NestedField.optional; +import static org.apache.iceberg.types.Types.NestedField.required; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +@RunWith(StandaloneHiveRunner.class) +public class TestIcebergInputFormat { + + @Rule + public TemporaryFolder temp = new TemporaryFolder(); + + @HiveSQL(files = {}, autoStart = true) + private HiveShell shell; + + private IcebergInputFormat inputFormat = new IcebergInputFormat(); + private File tableLocation; + private JobConf conf = new JobConf(); + + @Before + public void before() throws IOException { + tableLocation = temp.newFolder(); + Schema schema = new Schema(required(1, "id", Types.LongType.get()), + optional(2, "data", Types.StringType.get())); + PartitionSpec spec = PartitionSpec.unpartitioned(); + + HadoopTables tables = new HadoopTables(); + Table table = tables.create(schema, spec, tableLocation.getAbsolutePath()); + + List data = new ArrayList<>(); + data.add(TestHelpers.createSimpleRecord(1L, "Michael")); + data.add(TestHelpers.createSimpleRecord(2L, "Andy")); + data.add(TestHelpers.createSimpleRecord(3L, "Berta")); + + DataFile fileA = TestHelpers.writeFile(temp.newFile(), table, null, FileFormat.PARQUET, data); + table.newAppend().appendFile(fileA).commit(); + } + + @Test + public void testGetSplits() throws IOException { + IcebergInputFormat format = new IcebergInputFormat(); + conf.set(InputFormatConfig.TABLE_LOCATION, tableLocation.getAbsolutePath()); + conf.set(InputFormatConfig.TABLE_NAME, "source_db.table_a"); + InputSplit[] splits = format.getSplits(conf, 1); + assertEquals(splits.length, 1); + } + + @Test(expected = NullPointerException.class) + public void testGetSplitsNoLocation() throws IOException { + conf.set(InputFormatConfig.CATALOG_NAME, InputFormatConfig.HADOOP_TABLES); + conf.set(InputFormatConfig.TABLE_NAME, "source_db.table_a"); + inputFormat.getSplits(conf, 1); + } + + @Test + public void testInputFormat() { + shell.execute("CREATE DATABASE source_db"); + shell.execute(new StringBuilder() + .append("CREATE TABLE source_db.table_a ") 
+ .append("ROW FORMAT SERDE 'org.apache.iceberg.mr.mapred.IcebergSerDe' ") + .append("STORED AS ") + .append("INPUTFORMAT 'org.apache.iceberg.mr.mapred.IcebergInputFormat' ") + .append("OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' ") + .append("LOCATION '") + .append(tableLocation.getAbsolutePath()) + .append("' TBLPROPERTIES ('iceberg.catalog'='hadoop.tables'") + .append(")") + .toString()); + + List result = shell.executeStatement("SELECT id, data FROM source_db.table_a"); + + assertEquals(3, result.size()); + assertArrayEquals(new Object[]{1L, "Michael"}, result.get(0)); + assertArrayEquals(new Object[]{2L, "Andy"}, result.get(1)); + assertArrayEquals(new Object[]{3L, "Berta"}, result.get(2)); + } + + private List readRecords(JobConf jobConf) throws IOException { + InputSplit[] splits = inputFormat.getSplits(jobConf, 1); + RecordReader reader = inputFormat.getRecordReader(splits[0], jobConf, null); + List records = new ArrayList<>(); + IcebergWritable value = (IcebergWritable) reader.createValue(); + while (reader.next(null, value)) { + records.add(value.record().copy()); + } + return records; + } + +} diff --git a/mr/src/test/java/org/apache/iceberg/mr/mapreduce/TestIcebergInputFormat.java b/mr/src/test/java/org/apache/iceberg/mr/mapreduce/TestIcebergInputFormat.java index 05842f80e6bd..ba04708d4292 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/mapreduce/TestIcebergInputFormat.java +++ b/mr/src/test/java/org/apache/iceberg/mr/mapreduce/TestIcebergInputFormat.java @@ -20,7 +20,6 @@ package org.apache.iceberg.mr.mapreduce; import java.io.File; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Locale; @@ -37,30 +36,20 @@ import org.apache.iceberg.AppendFiles; import org.apache.iceberg.AssertHelpers; import org.apache.iceberg.DataFile; -import org.apache.iceberg.DataFiles; import org.apache.iceberg.FileFormat; -import org.apache.iceberg.Files; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; -import org.apache.iceberg.StructLike; import org.apache.iceberg.Table; import org.apache.iceberg.TableProperties; import org.apache.iceberg.TestHelpers.Row; -import org.apache.iceberg.avro.Avro; import org.apache.iceberg.catalog.Catalog; import org.apache.iceberg.catalog.TableIdentifier; import org.apache.iceberg.data.RandomGenericData; import org.apache.iceberg.data.Record; -import org.apache.iceberg.data.avro.DataWriter; -import org.apache.iceberg.data.orc.GenericOrcWriter; -import org.apache.iceberg.data.parquet.GenericParquetWriter; import org.apache.iceberg.expressions.Expressions; import org.apache.iceberg.hadoop.HadoopCatalog; import org.apache.iceberg.hadoop.HadoopTables; -import org.apache.iceberg.io.FileAppender; import org.apache.iceberg.mr.InputFormatConfig; -import org.apache.iceberg.orc.ORC; -import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.FluentIterable; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; @@ -69,31 +58,32 @@ import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; import org.junit.Assert; -import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +import static org.apache.iceberg.mr.TestHelpers.writeFile; import static 
org.apache.iceberg.types.Types.NestedField.required; @RunWith(Parameterized.class) public class TestIcebergInputFormat { - static final Schema SCHEMA = new Schema( - required(1, "data", Types.StringType.get()), - required(2, "id", Types.LongType.get()), - required(3, "date", Types.StringType.get())); + private static final Schema SCHEMA = new Schema( + required(1, "data", Types.StringType.get()), + required(2, "id", Types.LongType.get()), + required(3, "date", Types.StringType.get())); - static final PartitionSpec SPEC = PartitionSpec.builderFor(SCHEMA) - .identity("date") - .bucket("id", 1) - .build(); + private static final PartitionSpec SPEC = PartitionSpec.builderFor(SCHEMA) + .identity("date") + .bucket("id", 1) + .build(); @Rule public TemporaryFolder temp = new TemporaryFolder(); - private HadoopTables tables; - private Configuration conf; + + private Configuration conf = new Configuration(); + private HadoopTables tables = new HadoopTables(conf); @Parameterized.Parameters public static Object[][] parameters() { @@ -110,24 +100,18 @@ public TestIcebergInputFormat(String format) { this.format = FileFormat.valueOf(format.toUpperCase(Locale.ENGLISH)); } - @Before - public void before() { - conf = new Configuration(); - tables = new HadoopTables(conf); - } - @Test public void testUnpartitionedTable() throws Exception { File location = temp.newFolder(format.name()); Assert.assertTrue(location.delete()); Table table = tables.create(SCHEMA, PartitionSpec.unpartitioned(), - ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), - location.toString()); + ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), + location.toString()); List expectedRecords = RandomGenericData.generate(table.schema(), 1, 0L); - DataFile dataFile = writeFile(table, null, format, expectedRecords); + DataFile dataFile = writeFile(temp.newFile(), table, null, format, expectedRecords); table.newAppend() - .appendFile(dataFile) - .commit(); + .appendFile(dataFile) + .commit(); Job job = Job.getInstance(conf); InputFormatConfig.ConfigBuilder configBuilder = IcebergInputFormat.configure(job); configBuilder.readFrom(location.toString()).schema(table.schema()); @@ -139,14 +123,14 @@ public void testPartitionedTable() throws Exception { File location = temp.newFolder(format.name()); Assert.assertTrue(location.delete()); Table table = tables.create(SCHEMA, SPEC, - ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), - location.toString()); + ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), + location.toString()); List expectedRecords = RandomGenericData.generate(table.schema(), 1, 0L); expectedRecords.get(0).set(2, "2020-03-20"); - DataFile dataFile = writeFile(table, Row.of("2020-03-20", 0), format, expectedRecords); + DataFile dataFile = writeFile(temp.newFile(), table, Row.of("2020-03-20", 0), format, expectedRecords); table.newAppend() - .appendFile(dataFile) - .commit(); + .appendFile(dataFile) + .commit(); Job job = Job.getInstance(conf); InputFormatConfig.ConfigBuilder configBuilder = IcebergInputFormat.configure(job); @@ -159,18 +143,18 @@ public void testFilterExp() throws Exception { File location = temp.newFolder(format.name()); Assert.assertTrue(location.delete()); Table table = tables.create(SCHEMA, SPEC, - ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), - location.toString()); + ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), + location.toString()); List expectedRecords = 
RandomGenericData.generate(table.schema(), 2, 0L); expectedRecords.get(0).set(2, "2020-03-20"); expectedRecords.get(1).set(2, "2020-03-20"); - DataFile dataFile1 = writeFile(table, Row.of("2020-03-20", 0), format, expectedRecords); - DataFile dataFile2 = writeFile(table, Row.of("2020-03-21", 0), format, - RandomGenericData.generate(table.schema(), 2, 0L)); + DataFile dataFile1 = writeFile(temp.newFile(), table, Row.of("2020-03-20", 0), format, expectedRecords); + DataFile dataFile2 = writeFile(temp.newFile(), table, Row.of("2020-03-21", 0), format, + RandomGenericData.generate(table.schema(), 2, 0L)); table.newAppend() - .appendFile(dataFile1) - .appendFile(dataFile2) - .commit(); + .appendFile(dataFile1) + .appendFile(dataFile2) + .commit(); Job job = Job.getInstance(conf); InputFormatConfig.ConfigBuilder configBuilder = IcebergInputFormat.configure(job); configBuilder.readFrom(location.toString()) @@ -184,8 +168,8 @@ public void testResiduals() throws Exception { File location = temp.newFolder(format.name()); Assert.assertTrue(location.delete()); Table table = tables.create(SCHEMA, SPEC, - ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), - location.toString()); + ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), + location.toString()); List writeRecords = RandomGenericData.generate(table.schema(), 2, 0L); writeRecords.get(0).set(1, 123L); writeRecords.get(0).set(2, "2020-03-20"); @@ -195,13 +179,13 @@ public void testResiduals() throws Exception { List expectedRecords = new ArrayList<>(); expectedRecords.add(writeRecords.get(0)); - DataFile dataFile1 = writeFile(table, Row.of("2020-03-20", 0), format, writeRecords); - DataFile dataFile2 = writeFile(table, Row.of("2020-03-21", 0), format, - RandomGenericData.generate(table.schema(), 2, 0L)); + DataFile dataFile1 = writeFile(temp.newFile(), table, Row.of("2020-03-20", 0), format, writeRecords); + DataFile dataFile2 = writeFile(temp.newFile(), table, Row.of("2020-03-21", 0), format, + RandomGenericData.generate(table.schema(), 2, 0L)); table.newAppend() - .appendFile(dataFile1) - .appendFile(dataFile2) - .commit(); + .appendFile(dataFile1) + .appendFile(dataFile2) + .commit(); Job job = Job.getInstance(conf); InputFormatConfig.ConfigBuilder configBuilder = IcebergInputFormat.configure(job); configBuilder.readFrom(location.toString()) @@ -233,7 +217,7 @@ public void testFailedResidualFiltering() throws Exception { expectedRecords.get(0).set(2, "2020-03-20"); expectedRecords.get(1).set(2, "2020-03-20"); - DataFile dataFile1 = writeFile(table, Row.of("2020-03-20", 0), format, expectedRecords); + DataFile dataFile1 = writeFile(temp.newFile(), table, Row.of("2020-03-20", 0), format, expectedRecords); table.newAppend() .appendFile(dataFile1) .commit(); @@ -269,42 +253,42 @@ public void testProjection() throws Exception { Assert.assertTrue(location.delete()); Schema projectedSchema = TypeUtil.select(SCHEMA, ImmutableSet.of(1)); Table table = tables.create(SCHEMA, SPEC, - ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), - location.toString()); + ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()), + location.toString()); List inputRecords = RandomGenericData.generate(table.schema(), 1, 0L); - DataFile dataFile = writeFile(table, Row.of("2020-03-20", 0), format, inputRecords); + DataFile dataFile = writeFile(temp.newFile(), table, Row.of("2020-03-20", 0), format, inputRecords); table.newAppend() - .appendFile(dataFile) - .commit(); + .appendFile(dataFile) + .commit(); Job job 
     InputFormatConfig.ConfigBuilder configBuilder = IcebergInputFormat.configure(job);
     configBuilder
-            .schema(table.schema())
-            .readFrom(location.toString())
-            .project(projectedSchema);
+        .readFrom(location.toString())
+        .project(projectedSchema)
+        .schema(table.schema());
     List<Record> outputRecords = readRecords(job.getConfiguration());
     Assert.assertEquals(inputRecords.size(), outputRecords.size());
     Assert.assertEquals(projectedSchema.asStruct(), outputRecords.get(0).struct());
   }

   private static final Schema LOG_SCHEMA = new Schema(
-          Types.NestedField.optional(1, "id", Types.IntegerType.get()),
-          Types.NestedField.optional(2, "date", Types.StringType.get()),
-          Types.NestedField.optional(3, "level", Types.StringType.get()),
-          Types.NestedField.optional(4, "message", Types.StringType.get())
+      Types.NestedField.optional(1, "id", Types.IntegerType.get()),
+      Types.NestedField.optional(2, "date", Types.StringType.get()),
+      Types.NestedField.optional(3, "level", Types.StringType.get()),
+      Types.NestedField.optional(4, "message", Types.StringType.get())
   );

   private static final PartitionSpec IDENTITY_PARTITION_SPEC =
-          PartitionSpec.builderFor(LOG_SCHEMA).identity("date").identity("level").build();
+      PartitionSpec.builderFor(LOG_SCHEMA).identity("date").identity("level").build();

   @Test
   public void testIdentityPartitionProjections() throws Exception {
     File location = temp.newFolder(format.name());
     Assert.assertTrue(location.delete());
     Table table = tables.create(LOG_SCHEMA, IDENTITY_PARTITION_SPEC,
-            ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()),
-            location.toString());
+        ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()),
+        location.toString());

     List<Record> inputRecords = RandomGenericData.generate(LOG_SCHEMA, 10, 0);
     Integer idx = 0;
@@ -312,7 +296,8 @@ public void testIdentityPartitionProjections() throws Exception {
     for (Record record : inputRecords) {
       record.set(1, "2020-03-2" + idx);
       record.set(2, idx.toString());
-      append.appendFile(writeFile(table, Row.of("2020-03-2" + idx, idx.toString()), format, ImmutableList.of(record)));
+      append.appendFile(writeFile(temp.newFile(), table, Row.of("2020-03-2" + idx, idx.toString()), format,
+          ImmutableList.of(record)));
       idx += 1;
     }
     append.commit();
@@ -367,13 +352,13 @@ private static Schema withColumns(String... names) {
   }

   private void validateIdentityPartitionProjections(
-          String tablePath, Schema tableSchema, Schema projectedSchema, List<Record> inputRecords) throws Exception {
+      String tablePath, Schema tableSchema, Schema projectedSchema, List<Record> inputRecords) throws Exception {
     Job job = Job.getInstance(conf);
     InputFormatConfig.ConfigBuilder configBuilder = IcebergInputFormat.configure(job);
     configBuilder
-            .schema(tableSchema)
-            .readFrom(tablePath)
-            .project(projectedSchema);
+        .readFrom(tablePath)
+        .schema(tableSchema)
+        .project(projectedSchema);

     List<Record> actualRecords = readRecords(job.getConfiguration());
     Set<String> fieldNames = TypeUtil.indexByName(projectedSchema.asStruct()).keySet();
@@ -383,7 +368,8 @@ private void validateIdentityPartitionProjections(
       Assert.assertEquals("Projected schema should match", projectedSchema.asStruct(), actualRecord.struct());
       for (String name : fieldNames) {
         Assert.assertEquals(
-                "Projected field " + name + " should match", inputRecord.getField(name), actualRecord.getField(name));
+            "Projected field " + name + " should match", inputRecord.getField(name),
+            actualRecord.getField(name));
       }
     }
   }
@@ -393,23 +379,24 @@ public void testSnapshotReads() throws Exception {
     File location = temp.newFolder(format.name());
     Assert.assertTrue(location.delete());
     Table table = tables.create(SCHEMA, PartitionSpec.unpartitioned(),
-            ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()),
-            location.toString());
+        ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()),
+        location.toString());
     List<Record> expectedRecords = RandomGenericData.generate(table.schema(), 1, 0L);
     table.newAppend()
-            .appendFile(writeFile(table, null, format, expectedRecords))
-            .commit();
+        .appendFile(writeFile(temp.newFile(), table, null, format, expectedRecords))
+        .commit();

     long snapshotId = table.currentSnapshot().snapshotId();
     table.newAppend()
-            .appendFile(writeFile(table, null, format, RandomGenericData.generate(table.schema(), 1, 0L)))
-            .commit();
+        .appendFile(writeFile(temp.newFile(), table, null, format,
+            RandomGenericData.generate(table.schema(), 1, 0L)))
+        .commit();

     Job job = Job.getInstance(conf);
     InputFormatConfig.ConfigBuilder configBuilder = IcebergInputFormat.configure(job);
     configBuilder
-            .schema(table.schema())
-            .readFrom(location.toString())
-            .snapshotId(snapshotId);
+        .schema(table.schema())
+        .readFrom(location.toString())
+        .snapshotId(snapshotId);
     validate(job, expectedRecords);
   }
@@ -419,12 +406,12 @@ public void testLocality() throws Exception {
     File location = temp.newFolder(format.name());
     Assert.assertTrue(location.delete());
     Table table = tables.create(SCHEMA, PartitionSpec.unpartitioned(),
-            ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()),
-            location.toString());
+        ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()),
+        location.toString());
     List<Record> expectedRecords = RandomGenericData.generate(table.schema(), 1, 0L);
     table.newAppend()
-            .appendFile(writeFile(table, null, format, expectedRecords))
-            .commit();
+        .appendFile(writeFile(temp.newFile(), table, null, format, expectedRecords))
+        .commit();
     Job job = Job.getInstance(conf);
     InputFormatConfig.ConfigBuilder configBuilder = IcebergInputFormat.configure(job);
     configBuilder.readFrom(location.toString()).schema(table.schema());
@@ -454,20 +441,20 @@ public void testCustomCatalog() throws Exception {
     Catalog catalog = new HadoopCatalogFunc().apply(conf);
     TableIdentifier tableIdentifier = TableIdentifier.of("db", "t");
     Table table = catalog.createTable(tableIdentifier, SCHEMA, SPEC,
-            ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()));
+        ImmutableMap.of(TableProperties.DEFAULT_FILE_FORMAT, format.name()));
     List<Record> expectedRecords = RandomGenericData.generate(table.schema(), 1, 0L);
     expectedRecords.get(0).set(2, "2020-03-20");

-    DataFile dataFile = writeFile(table, Row.of("2020-03-20", 0), format, expectedRecords);
+    DataFile dataFile = writeFile(temp.newFile(), table, Row.of("2020-03-20", 0), format, expectedRecords);
     table.newAppend()
-            .appendFile(dataFile)
-            .commit();
+        .appendFile(dataFile)
+        .commit();

     Job job = Job.getInstance(conf);
     InputFormatConfig.ConfigBuilder configBuilder = IcebergInputFormat.configure(job);
     configBuilder
-            .catalogFunc(HadoopCatalogFunc.class)
-            .schema(table.schema())
-            .readFrom(tableIdentifier.toString());
+        .catalogFunc(HadoopCatalogFunc.class)
+        .schema(table.schema())
+        .readFrom(tableIdentifier.toString());
     validate(job, expectedRecords);
   }
@@ -487,14 +474,14 @@ private static List<Record> readRecords(Configuration conf) {
     IcebergInputFormat<Record> icebergInputFormat = new IcebergInputFormat<>();
     List<InputSplit> splits = icebergInputFormat.getSplits(context);
     return
-            FluentIterable
-                .from(splits)
-                .transformAndConcat(split -> readRecords(icebergInputFormat, split, context))
-                .toList();
+        FluentIterable
+            .from(splits)
+            .transformAndConcat(split -> readRecords(icebergInputFormat, split, context))
+            .toList();
   }

   private static Iterable<Record> readRecords(
-          IcebergInputFormat<Record> inputFormat, InputSplit split, TaskAttemptContext context) {
+      IcebergInputFormat<Record> inputFormat, InputSplit split, TaskAttemptContext context) {
     RecordReader<Void, Record> recordReader = inputFormat.createRecordReader(split, context);
     List<Record> records = new ArrayList<>();
     try {
@@ -508,50 +495,4 @@ private static Iterable<Record> readRecords(
     return records;
   }

-  private DataFile writeFile(
-      Table table, StructLike partitionData, FileFormat fileFormat, List<Record> records) throws IOException {
-    File file = temp.newFile();
-    Assert.assertTrue(file.delete());
-    FileAppender<Record> appender;
-    switch (fileFormat) {
-      case AVRO:
-        appender = Avro.write(Files.localOutput(file))
-            .schema(table.schema())
-            .createWriterFunc(DataWriter::create)
-            .named(fileFormat.name())
-            .build();
-        break;
-      case PARQUET:
-        appender = Parquet.write(Files.localOutput(file))
-            .schema(table.schema())
-            .createWriterFunc(GenericParquetWriter::buildWriter)
-            .named(fileFormat.name())
-            .build();
-        break;
-      case ORC:
-        appender = ORC.write(Files.localOutput(file))
-            .schema(table.schema())
-            .createWriterFunc(GenericOrcWriter::buildWriter)
-            .build();
-        break;
-      default:
-        throw new UnsupportedOperationException("Cannot write format: " + fileFormat);
-    }
-
-    try {
-      appender.addAll(records);
-    } finally {
-      appender.close();
-    }
-
-    DataFiles.Builder builder = DataFiles.builder(table.spec())
-        .withPath(file.toString())
-        .withFormat(format)
-        .withFileSizeInBytes(file.length())
-        .withMetrics(appender.metrics());
-    if (partitionData != null) {
-      builder.withPartition(partitionData);
-    }
-    return builder.build();
-  }
 }