diff --git a/core/src/main/java/org/apache/spark/util/SerializableConfigurationSuite.java b/core/src/main/java/org/apache/spark/util/SerializableConfigurationSuite.java new file mode 100644 index 000000000000..135265302827 --- /dev/null +++ b/core/src/main/java/org/apache/spark/util/SerializableConfigurationSuite.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.spark.util; + +/** + * This test ensures that the API we've exposed for SerializableConfiguration is usable + * from Java. It does not test any of the serialization itself. 
+ */ +class SerializableConfigurationSuite { + public SerializableConfiguration compileTest() { + SerializableConfiguration scs = new SerializableConfiguration(null); + return scs; + } +} diff --git a/core/src/main/scala/org/apache/spark/util/SerializableConfiguration.scala b/core/src/main/scala/org/apache/spark/util/SerializableConfiguration.scala index 3354a923273f..52b309abd77f 100644 --- a/core/src/main/scala/org/apache/spark/util/SerializableConfiguration.scala +++ b/core/src/main/scala/org/apache/spark/util/SerializableConfiguration.scala @@ -20,7 +20,13 @@ import java.io.{ObjectInputStream, ObjectOutputStream} import org.apache.hadoop.conf.Configuration -private[spark] +import org.apache.spark.annotation.{DeveloperApi, Unstable} + +/** + * Helper wrapper to serialize a Hadoop configuration. Intended for use when implementing + * DataSourceV2 readers & writers which depend on the Hadoop configuration from the driver node. + */ +@DeveloperApi @Unstable class SerializableConfiguration(@transient var value: Configuration) extends Serializable { private def writeObject(out: ObjectOutputStream): Unit = Utils.tryOrIOException { out.defaultWriteObject()