
@@ -33,7 +33,7 @@ public ChunkFetchFailure(StreamChunkId streamChunkId, String errorString) {
   }
 
   @Override
-  public Type type() { return Type.ChunkFetchFailure; }
+  public Message.Type type() { return Type.ChunkFetchFailure; }
 
   @Override
   public int encodedLength() {

@@ -32,7 +32,7 @@ public ChunkFetchRequest(StreamChunkId streamChunkId) {
   }
 
   @Override
-  public Type type() { return Type.ChunkFetchRequest; }
+  public Message.Type type() { return Type.ChunkFetchRequest; }
 
   @Override
   public int encodedLength() {

@@ -39,7 +39,7 @@ public ChunkFetchSuccess(StreamChunkId streamChunkId, ManagedBuffer buffer) {
   }
 
   @Override
-  public Type type() { return Type.ChunkFetchSuccess; }
+  public Message.Type type() { return Type.ChunkFetchSuccess; }
 
   @Override
   public int encodedLength() {

@@ -34,7 +34,7 @@ public OneWayMessage(ManagedBuffer body) {
   }
 
   @Override
-  public Type type() { return Type.OneWayMessage; }
+  public Message.Type type() { return Type.OneWayMessage; }
 
   @Override
   public int encodedLength() {

@@ -31,7 +31,7 @@ public RpcFailure(long requestId, String errorString) {
   }
 
   @Override
-  public Type type() { return Type.RpcFailure; }
+  public Message.Type type() { return Type.RpcFailure; }
 
   @Override
   public int encodedLength() {

@@ -38,7 +38,7 @@ public RpcRequest(long requestId, ManagedBuffer message) {
   }
 
   @Override
-  public Type type() { return Type.RpcRequest; }
+  public Message.Type type() { return Type.RpcRequest; }
 
   @Override
   public int encodedLength() {

@@ -33,7 +33,7 @@ public RpcResponse(long requestId, ManagedBuffer message) {
   }
 
   @Override
-  public Type type() { return Type.RpcResponse; }
+  public Message.Type type() { return Type.RpcResponse; }
 
   @Override
   public int encodedLength() {

@@ -33,7 +33,7 @@ public StreamFailure(String streamId, String error) {
   }
 
   @Override
-  public Type type() { return Type.StreamFailure; }
+  public Message.Type type() { return Type.StreamFailure; }
 
   @Override
   public int encodedLength() {

@@ -34,7 +34,7 @@ public StreamRequest(String streamId) {
   }
 
   @Override
-  public Type type() { return Type.StreamRequest; }
+  public Message.Type type() { return Type.StreamRequest; }
 
   @Override
   public int encodedLength() {

@@ -40,7 +40,7 @@ public StreamResponse(String streamId, long byteCount, ManagedBuffer buffer) {
   }
 
   @Override
-  public Type type() { return Type.StreamResponse; }
+  public Message.Type type() { return Type.StreamResponse; }
 
   @Override
   public int encodedLength() {

@@ -52,7 +52,7 @@ private UploadStream(long requestId, ManagedBuffer meta, long bodyByteCount) {
   }
 
   @Override
-  public Type type() { return Type.UploadStream; }
+  public Message.Type type() { return Type.UploadStream; }
 
   @Override
   public int encodedLength() {

@@ -23,6 +23,7 @@
 import org.apache.spark.network.buffer.NettyManagedBuffer;
 import org.apache.spark.network.protocol.Encoders;
 import org.apache.spark.network.protocol.AbstractMessage;
+import org.apache.spark.network.protocol.Message;
 
 /**
  * Encodes a Sasl-related message which is attempting to authenticate using some credentials tagged
@@ -46,7 +47,7 @@ class SaslMessage extends AbstractMessage {
   }
 
   @Override
-  public Type type() { return Type.User; }
+  public Message.Type type() { return Type.User; }
 
   @Override
   public int encodedLength() {
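
The hunks above all make the same mechanical change: the return type of type() is spelled as the qualified nested name Message.Type instead of the bare Type. A minimal sketch of the shape involved, using a hypothetical Ping message rather than any class from this diff:

// Sketch only: this Message interface mirrors one that declares a nested Type enum
// and a type() accessor; Ping is a made-up implementation, not part of this change.
interface Message {
  enum Type { Ping, Pong }
  Type type();
}

class Ping implements Message {
  @Override
  public Message.Type type() { return Type.Ping; }  // nested enum named via its enclosing interface

  public static void main(String[] args) {
    System.out.println(new Ping().type());  // prints "Ping"
  }
}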

@@ -101,7 +101,7 @@ void createAndStart(String[] blockIds, BlockFetchingListener listener)
 
   public RetryingBlockFetcher(
       TransportConf conf,
-      BlockFetchStarter fetchStarter,
+      RetryingBlockFetcher.BlockFetchStarter fetchStarter,
       String[] blockIds,
       BlockFetchingListener listener) {
     this.fetchStarter = fetchStarter;
@@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.annotation;

import java.lang.annotation.*;

/**
* APIs that are meant to evolve towards becoming stable APIs, but are not stable APIs yet.
* Evolving interfaces can change from one feature release to another release (i.e. 2.1 to 2.2).
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,
ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
public @interface Evolving {}
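
As a hedged usage sketch (FooStreamReader is hypothetical and not part of this diff), the annotation can sit on a type or on individual members, since its @Target covers both:

import org.apache.spark.annotation.Evolving;

// Hypothetical developer-facing API that may still change between feature releases.
@Evolving
public class FooStreamReader {

  // Individual methods can also be flagged as still evolving.
  @Evolving
  public long pendingRecords() {
    return 0L;  // placeholder body
  }
}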

This file was deleted.

31 changes: 31 additions & 0 deletions common/tags/src/main/java/org/apache/spark/annotation/Stable.java
@@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.annotation;

import java.lang.annotation.*;

/**
* Stable APIs that retain source and binary compatibility within a major release.
* These interfaces can change from one major release to another major release
* (e.g. from 1.0 to 2.0).
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,
ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
public @interface Stable {}
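
Because @Target also includes ElementType.PACKAGE, a whole package can be declared stable from its package-info.java. A small sketch with a hypothetical package name:

// Hypothetical package-info.java; the import placed after the package clause
// still applies to the annotation on the package declaration.
@Stable
package org.apache.spark.example.api;

import org.apache.spark.annotation.Stable;
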
@@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.annotation;

import java.lang.annotation.*;

/**
* Unstable APIs, with no guarantee on stability.
* Classes that are unannotated are considered Unstable.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,
ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
public @interface Unstable {}
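
All three annotations use RetentionPolicy.RUNTIME, so a caller can inspect an API's declared stability reflectively. A sketch; the class being checked is an arbitrary stand-in:

import org.apache.spark.annotation.Evolving;
import org.apache.spark.annotation.Stable;
import org.apache.spark.annotation.Unstable;

public class StabilityCheck {

  // Returns the stability label declared on a class, falling back to the
  // documented default: unannotated classes are considered unstable.
  static String stabilityOf(Class<?> cls) {
    if (cls.isAnnotationPresent(Stable.class)) return "stable";
    if (cls.isAnnotationPresent(Evolving.class)) return "evolving";
    if (cls.isAnnotationPresent(Unstable.class)) return "unstable";
    return "unannotated (treated as unstable)";
  }

  public static void main(String[] args) throws Exception {
    String name = args.length > 0 ? args[0] : "java.lang.String";
    System.out.println(name + ": " + stabilityOf(Class.forName(name)));
  }
}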

@@ -22,7 +22,7 @@ import scala.reflect.ClassTag
 import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream
 import com.amazonaws.services.kinesis.model.Record
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Evolving
 import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.{BlockId, StorageLevel}
 import org.apache.spark.streaming.{Duration, StreamingContext, Time}
@@ -84,14 +84,14 @@ private[kinesis] class KinesisInputDStream[T: ClassTag](
   }
 }
 
-@InterfaceStability.Evolving
+@Evolving
 object KinesisInputDStream {
   /**
    * Builder for [[KinesisInputDStream]] instances.
    *
    * @since 2.2.0
    */
-  @InterfaceStability.Evolving
+  @Evolving
   class Builder {
     // Required params
     private var streamingContext: Option[StreamingContext] = None

@@ -14,13 +14,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.streaming.kinesis
 
-import scala.collection.JavaConverters._
+package org.apache.spark.streaming.kinesis
 
 import com.amazonaws.auth._
 
-import org.apache.spark.annotation.InterfaceStability
+import org.apache.spark.annotation.Evolving
 import org.apache.spark.internal.Logging
 
 /**
@@ -84,14 +83,14 @@ private[kinesis] final case class STSCredentials(
   }
 }
 
-@InterfaceStability.Evolving
+@Evolving
 object SparkAWSCredentials {
   /**
    * Builder for [[SparkAWSCredentials]] instances.
    *
    * @since 2.2.0
    */
-  @InterfaceStability.Evolving
+  @Evolving
   class Builder {
     private var basicCreds: Option[BasicCredentials] = None
     private var stsCreds: Option[STSCredentials] = None

@@ -31,8 +31,8 @@ abstract class AbstractAppHandle implements SparkAppHandle {
   private final LauncherServer server;
 
   private LauncherServer.ServerConnection connection;
-  private List<Listener> listeners;
-  private AtomicReference<State> state;
+  private List<SparkAppHandle.Listener> listeners;
+  private AtomicReference<SparkAppHandle.State> state;
   private volatile String appId;
   private volatile boolean disposed;
 
@@ -42,15 +42,15 @@ protected AbstractAppHandle(LauncherServer server) {
   }
 
   @Override
-  public synchronized void addListener(Listener l) {
+  public synchronized void addListener(SparkAppHandle.Listener l) {
     if (listeners == null) {
      listeners = new CopyOnWriteArrayList<>();
    }
    listeners.add(l);
  }
 
  @Override
-  public State getState() {
+  public SparkAppHandle.State getState() {
    return state.get();
  }
 
@@ -120,11 +120,11 @@ synchronized void dispose() {
    }
  }
 
-  void setState(State s) {
+  void setState(SparkAppHandle.State s) {
    setState(s, false);
  }
 
-  void setState(State s, boolean force) {
+  void setState(SparkAppHandle.State s, boolean force) {
    if (force) {
      state.set(s);
      fireEvent(false);