core/src/main/scala/org/apache/spark/rpc/RpcEnvStoppedException.scala (new file)
@@ -0,0 +1,20 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rpc

private[rpc] class RpcEnvStoppedException()
extends IllegalStateException("RpcEnv already stopped.")
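
As a quick illustration of the pattern the new exception enables, here is a minimal, self-contained sketch. It is not part of the patch: the real class is private[rpc], so the sketch uses a local stand-in and a plain println logger instead of Spark's Logging trait, and the helper names are hypothetical.

// Illustrative stand-in for the private[rpc] class added above.
class RpcEnvStoppedException()
  extends IllegalStateException("RpcEnv already stopped.")

object StoppedEnvSketch {
  // Hypothetical logger; Spark code would call Logging.logWarning instead.
  private def logWarning(msg: String): Unit = println(s"WARN: $msg")

  // Posting to a stopped RpcEnv is expected during shutdown, so log only the
  // exception message rather than a full stack trace; other errors propagate.
  def postQuietly(post: () => Unit): Unit = {
    try {
      post()
    } catch {
      case e: RpcEnvStoppedException => logWarning(e.getMessage)
    }
  }

  def main(args: Array[String]): Unit = {
    postQuietly(() => throw new RpcEnvStoppedException()) // WARN: RpcEnv already stopped.
  }
}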
core/src/main/scala/org/apache/spark/rpc/netty/Dispatcher.scala
@@ -106,7 +106,7 @@ private[netty] class Dispatcher(nettyEnv: NettyRpcEnv) extends Logging {
val iter = endpoints.keySet().iterator()
while (iter.hasNext) {
val name = iter.next
- postMessage(name, message, (e) => logWarning(s"Message $message dropped.", e))
+ postMessage(name, message, (e) => logWarning(s"Message $message dropped. ${e.getMessage}"))
}
}

@@ -156,7 +156,7 @@ private[netty] class Dispatcher(nettyEnv: NettyRpcEnv) extends Logging {
if (shouldCallOnStop) {
// We don't need to call `onStop` in the `synchronized` block
val error = if (stopped) {
- new IllegalStateException("RpcEnv already stopped.")
+ new RpcEnvStoppedException()
} else {
new SparkException(s"Could not find $endpointName or it has been stopped.")
}
core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala
@@ -182,7 +182,11 @@ private[netty] class NettyRpcEnv(
val remoteAddr = message.receiver.address
if (remoteAddr == address) {
// Message to a local RPC endpoint.
- dispatcher.postOneWayMessage(message)
+ try {
+   dispatcher.postOneWayMessage(message)
+ } catch {
+   case e: RpcEnvStoppedException => logWarning(e.getMessage)
+ }
} else {
// Message to a remote RPC endpoint.
postToOutbox(message.receiver, OneWayOutboxMessage(serialize(message)))
core/src/main/scala/org/apache/spark/rpc/netty/Outbox.scala (5 additions & 2 deletions)
@@ -25,7 +25,7 @@ import scala.util.control.NonFatal

import org.apache.spark.{Logging, SparkException}
import org.apache.spark.network.client.{RpcResponseCallback, TransportClient}
- import org.apache.spark.rpc.RpcAddress
+ import org.apache.spark.rpc.{RpcAddress, RpcEnvStoppedException}

private[netty] sealed trait OutboxMessage {

@@ -43,7 +43,10 @@ private[netty] case class OneWayOutboxMessage(content: ByteBuffer) extends OutboxMessage
}

override def onFailure(e: Throwable): Unit = {
logWarning(s"Failed to send one-way RPC.", e)
e match {
case e1: RpcEnvStoppedException => logWarning(e1.getMessage)
case e1: Throwable => logWarning(s"Failed to send one-way RPC.", e1)
}
}

}
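
A design note on the new type, with a small self-contained sketch (stand-in class, hypothetical names): because RpcEnvStoppedException subclasses IllegalStateException with the same message, existing handlers that catch or match IllegalStateException keep working unchanged; only call sites that want the quieter logging, like the onFailure above, need to match the subtype.

// Stand-in mirroring the class added in this patch; illustrative only.
class RpcEnvStoppedException()
  extends IllegalStateException("RpcEnv already stopped.")

object FailureHandlingSketch {
  // Mirrors the onFailure split above: quiet for a stopped RpcEnv, full detail otherwise.
  def describe(e: Throwable): String = e match {
    case e1: RpcEnvStoppedException => s"dropped quietly: ${e1.getMessage}"
    case e1: Throwable => s"Failed to send one-way RPC: $e1"
  }

  def main(args: Array[String]): Unit = {
    println(describe(new RpcEnvStoppedException()))
    println(describe(new RuntimeException("connection reset")))
    // Code that matches on IllegalStateException still sees the new type:
    println(new RpcEnvStoppedException().isInstanceOf[IllegalStateException]) // true
  }
}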