[SPARK-33618][CORE] Use hadoop-client instead of hadoop-client-api to make hadoop-aws work #30508
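For anyone skimming the manifest diff below: this change moves Spark's Hadoop 3.2 build off the shaded hadoop-client-api / hadoop-client-runtime artifacts and back onto the plain hadoop-client dependency, which is what lets hadoop-aws resolve the unshaded Hadoop classes it was built against. A hedged sketch of that swap, written in sbt syntax purely for illustration (Spark's own build is Maven-based); the versions mirror the jars listed in the diff.

```scala
// Illustrative sketch only: Spark's real build is Maven, but the dependency swap
// made by this PR looks roughly like this in sbt/Scala syntax.

// Before (shaded client artifacts):
// libraryDependencies ++= Seq(
//   "org.apache.hadoop" % "hadoop-client-api"     % "3.2.0",
//   "org.apache.hadoop" % "hadoop-client-runtime" % "3.2.0"
// )

// After (unshaded client, which pulls in the hadoop-common/hdfs/yarn/mapreduce
// modules and the transitive dependencies that appear in the diff below):
libraryDependencies += "org.apache.hadoop" % "hadoop-client" % "3.2.0"
```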
Changes from all commits
@@ -3,12 +3,14 @@ JLargeArrays/1.5//JLargeArrays-1.5.jar
JTransforms/3.1//JTransforms-3.1.jar
RoaringBitmap/0.9.0//RoaringBitmap-0.9.0.jar
ST4/4.0.4//ST4-4.0.4.jar
accessors-smart/1.2//accessors-smart-1.2.jar
activation/1.1.1//activation-1.1.1.jar
aircompressor/0.10//aircompressor-0.10.jar
algebra_2.12/2.0.0-M2//algebra_2.12-2.0.0-M2.jar
antlr-runtime/3.5.2//antlr-runtime-3.5.2.jar
antlr4-runtime/4.8-1//antlr4-runtime-4.8-1.jar
aopalliance-repackaged/2.6.1//aopalliance-repackaged-2.6.1.jar
aopalliance/1.0//aopalliance-1.0.jar
arpack_combined_all/0.1//arpack_combined_all-0.1.jar
arrow-format/2.0.0//arrow-format-2.0.0.jar
arrow-memory-core/2.0.0//arrow-memory-core-2.0.0.jar
@@ -25,12 +27,15 @@ breeze_2.12/1.0//breeze_2.12-1.0.jar
cats-kernel_2.12/2.0.0-M4//cats-kernel_2.12-2.0.0-M4.jar
chill-java/0.9.5//chill-java-0.9.5.jar
chill_2.12/0.9.5//chill_2.12-0.9.5.jar
commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
commons-cli/1.2//commons-cli-1.2.jar
commons-codec/1.10//commons-codec-1.10.jar
commons-collections/3.2.2//commons-collections-3.2.2.jar
commons-compiler/3.0.16//commons-compiler-3.0.16.jar
commons-compress/1.20//commons-compress-1.20.jar
commons-configuration2/2.1.1//commons-configuration2-2.1.1.jar
commons-crypto/1.1.0//commons-crypto-1.1.0.jar
commons-daemon/1.0.13//commons-daemon-1.0.13.jar
commons-dbcp/1.4//commons-dbcp-1.4.jar
commons-httpclient/3.1//commons-httpclient-3.1.jar
commons-io/2.5//commons-io-2.5.jar
@@ -50,13 +55,30 @@ datanucleus-api-jdo/4.2.4//datanucleus-api-jdo-4.2.4.jar
datanucleus-core/4.1.17//datanucleus-core-4.1.17.jar
datanucleus-rdbms/4.1.19//datanucleus-rdbms-4.1.19.jar
derby/10.12.1.1//derby-10.12.1.1.jar
dnsjava/2.1.7//dnsjava-2.1.7.jar
dropwizard-metrics-hadoop-metrics2-reporter/0.1.2//dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar
ehcache/3.3.1//ehcache-3.3.1.jar
flatbuffers-java/1.9.0//flatbuffers-java-1.9.0.jar
generex/1.0.2//generex-1.0.2.jar
geronimo-jcache_1.0_spec/1.0-alpha-1//geronimo-jcache_1.0_spec-1.0-alpha-1.jar
gson/2.2.4//gson-2.2.4.jar
guava/14.0.1//guava-14.0.1.jar
hadoop-client-api/3.2.0//hadoop-client-api-3.2.0.jar
hadoop-client-runtime/3.2.0//hadoop-client-runtime-3.2.0.jar
guice-servlet/4.0//guice-servlet-4.0.jar
guice/4.0//guice-4.0.jar
hadoop-annotations/3.2.0//hadoop-annotations-3.2.0.jar
hadoop-auth/3.2.0//hadoop-auth-3.2.0.jar
hadoop-client/3.2.0//hadoop-client-3.2.0.jar
hadoop-common/3.2.0//hadoop-common-3.2.0.jar
hadoop-hdfs-client/3.2.0//hadoop-hdfs-client-3.2.0.jar
hadoop-mapreduce-client-common/3.2.0//hadoop-mapreduce-client-common-3.2.0.jar
hadoop-mapreduce-client-core/3.2.0//hadoop-mapreduce-client-core-3.2.0.jar
hadoop-mapreduce-client-jobclient/3.2.0//hadoop-mapreduce-client-jobclient-3.2.0.jar
hadoop-yarn-api/3.2.0//hadoop-yarn-api-3.2.0.jar
hadoop-yarn-client/3.2.0//hadoop-yarn-client-3.2.0.jar
hadoop-yarn-common/3.2.0//hadoop-yarn-common-3.2.0.jar
hadoop-yarn-registry/3.2.0//hadoop-yarn-registry-3.2.0.jar
hadoop-yarn-server-common/3.2.0//hadoop-yarn-server-common-3.2.0.jar
hadoop-yarn-server-web-proxy/3.2.0//hadoop-yarn-server-web-proxy-3.2.0.jar
hive-beeline/2.3.7//hive-beeline-2.3.7.jar
hive-cli/2.3.7//hive-cli-2.3.7.jar
hive-common/2.3.7//hive-common-2.3.7.jar
@@ -86,6 +108,8 @@ jackson-core/2.10.0//jackson-core-2.10.0.jar
jackson-databind/2.10.0//jackson-databind-2.10.0.jar
jackson-dataformat-yaml/2.10.0//jackson-dataformat-yaml-2.10.0.jar
jackson-datatype-jsr310/2.11.2//jackson-datatype-jsr310-2.11.2.jar
jackson-jaxrs-base/2.9.5//jackson-jaxrs-base-2.9.5.jar
jackson-jaxrs-json-provider/2.9.5//jackson-jaxrs-json-provider-2.9.5.jar
jackson-mapper-asl/1.9.13//jackson-mapper-asl-1.9.13.jar
jackson-module-jaxb-annotations/2.10.0//jackson-module-jaxb-annotations-2.10.0.jar
jackson-module-paranamer/2.10.0//jackson-module-paranamer-2.10.0.jar
@@ -98,11 +122,13 @@ jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
janino/3.0.16//janino-3.0.16.jar
javassist/3.25.0-GA//javassist-3.25.0-GA.jar
javax.inject/1//javax.inject-1.jar
javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar
javax.servlet-api/3.1.0//javax.servlet-api-3.1.0.jar
javolution/5.5.1//javolution-5.5.1.jar
jaxb-api/2.2.11//jaxb-api-2.2.11.jar
jaxb-runtime/2.3.2//jaxb-runtime-2.3.2.jar
jcip-annotations/1.0-1//jcip-annotations-1.0-1.jar
jcl-over-slf4j/1.7.30//jcl-over-slf4j-1.7.30.jar
jdo-api/3.0.1//jdo-api-3.0.1.jar
jersey-client/2.30//jersey-client-2.30.jar
@@ -116,14 +142,30 @@ jline/2.14.6//jline-2.14.6.jar
joda-time/2.10.5//joda-time-2.10.5.jar
jodd-core/3.5.2//jodd-core-3.5.2.jar
jpam/1.1//jpam-1.1.jar
json-smart/2.3//json-smart-2.3.jar
json/1.8//json-1.8.jar
json4s-ast_2.12/3.7.0-M5//json4s-ast_2.12-3.7.0-M5.jar
json4s-core_2.12/3.7.0-M5//json4s-core_2.12-3.7.0-M5.jar
json4s-jackson_2.12/3.7.0-M5//json4s-jackson_2.12-3.7.0-M5.jar
json4s-scalap_2.12/3.7.0-M5//json4s-scalap_2.12-3.7.0-M5.jar
jsp-api/2.1//jsp-api-2.1.jar
jsr305/3.0.0//jsr305-3.0.0.jar
jta/1.1//jta-1.1.jar
jul-to-slf4j/1.7.30//jul-to-slf4j-1.7.30.jar
kerb-admin/1.0.1//kerb-admin-1.0.1.jar
kerb-client/1.0.1//kerb-client-1.0.1.jar
kerb-common/1.0.1//kerb-common-1.0.1.jar
kerb-core/1.0.1//kerb-core-1.0.1.jar
kerb-crypto/1.0.1//kerb-crypto-1.0.1.jar
kerb-identity/1.0.1//kerb-identity-1.0.1.jar
kerb-server/1.0.1//kerb-server-1.0.1.jar
kerb-simplekdc/1.0.1//kerb-simplekdc-1.0.1.jar
Member
Just out of curiosity, does Spark have a chance to play the role of a KDC at runtime?

Member
This is actually a revert. It was added in ce7ba2e#diff-e45e1eee8dcfd7eaf8a013cec02b67806da3edeabe0f195ac6b4402f67d4b6dcR146

Member
It looks like the original PR did not handle any transitive artifact exclusion at all 😸
kerb-util/1.0.1//kerb-util-1.0.1.jar
kerby-asn1/1.0.1//kerby-asn1-1.0.1.jar
kerby-config/1.0.1//kerby-config-1.0.1.jar
kerby-pkix/1.0.1//kerby-pkix-1.0.1.jar
kerby-util/1.0.1//kerby-util-1.0.1.jar
kerby-xdr/1.0.1//kerby-xdr-1.0.1.jar
kryo-shaded/4.0.2//kryo-shaded-4.0.2.jar
kubernetes-client/4.12.0//kubernetes-client-4.12.0.jar
kubernetes-model-admissionregistration/4.12.0//kubernetes-model-admissionregistration-4.12.0.jar
@@ -161,7 +203,9 @@ metrics-json/4.1.1//metrics-json-4.1.1.jar
metrics-jvm/4.1.1//metrics-jvm-4.1.1.jar
minlog/1.3.0//minlog-1.3.0.jar
netty-all/4.1.51.Final//netty-all-4.1.51.Final.jar
nimbus-jose-jwt/4.41.1//nimbus-jose-jwt-4.41.1.jar
objenesis/2.6//objenesis-2.6.jar
okhttp/2.7.5//okhttp-2.7.5.jar
okhttp/3.12.12//okhttp-3.12.12.jar
okio/1.14.0//okio-1.14.0.jar
opencsv/2.3//opencsv-2.3.jar
@@ -180,6 +224,7 @@ parquet-jackson/1.10.1//parquet-jackson-1.10.1.jar
protobuf-java/2.5.0//protobuf-java-2.5.0.jar
py4j/0.10.9//py4j-0.10.9.jar
pyrolite/4.30//pyrolite-4.30.jar
re2j/1.1//re2j-1.1.jar
scala-collection-compat_2.12/2.1.1//scala-collection-compat_2.12-2.1.1.jar
scala-compiler/2.12.10//scala-compiler-2.12.10.jar
scala-library/2.12.10//scala-library-2.12.10.jar
@@ -197,12 +242,15 @@ spire-platform_2.12/0.17.0-M1//spire-platform_2.12-0.17.0-M1.jar
spire-util_2.12/0.17.0-M1//spire-util_2.12-0.17.0-M1.jar
spire_2.12/0.17.0-M1//spire_2.12-0.17.0-M1.jar
stax-api/1.0.1//stax-api-1.0.1.jar
stax2-api/3.1.4//stax2-api-3.1.4.jar
stream/2.9.6//stream-2.9.6.jar
super-csv/2.2.0//super-csv-2.2.0.jar
threeten-extra/1.5.0//threeten-extra-1.5.0.jar
token-provider/1.0.1//token-provider-1.0.1.jar
transaction-api/1.1//transaction-api-1.1.jar
univocity-parsers/2.9.0//univocity-parsers-2.9.0.jar
velocity/1.5//velocity-1.5.jar
woodstox-core/5.0.3//woodstox-core-5.0.3.jar
xbean-asm7-shaded/4.15//xbean-asm7-shaded-4.15.jar
xz/1.5//xz-1.5.jar
zjsonpatch/0.3.0//zjsonpatch-0.3.0.jar
This pulls a ton more code into Spark now, essentially the whole client... hm, is this going to affect the hadoop-provided distro? It also downgrades some versions above, which may be harmless. Do we really need this just for hadoop-aws?
Oh, @srowen, this is basically a revert. An issue was found with shading the Hadoop client, so it was reverted here as a safe choice. A proper fix is in progress.
Ah OK, never mind. I am not following closely.
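For context on the hadoop-aws question above, here is a minimal, hypothetical sketch of the kind of s3a read this dependency change is meant to keep working. The object name, bucket path, and credential setup are placeholders and not part of this PR; it assumes hadoop-aws (and a matching AWS SDK) is on the classpath.

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical usage sketch: reading from S3 through the s3a connector provided
// by hadoop-aws. Credentials are assumed to be supplied out of band
// (environment, instance profile, or fs.s3a.access.key / fs.s3a.secret.key).
object S3AReadSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("s3a-read-sketch")
      // Optional: pin the s3a filesystem implementation explicitly.
      .config("spark.hadoop.fs.s3a.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem")
      .getOrCreate()

    // Placeholder bucket and prefix.
    val df = spark.read.parquet("s3a://example-bucket/path/to/data")
    df.show(10)

    spark.stop()
  }
}
```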