Skip to content
Closed
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ import org.apache.spark.util._
private[spark] class PythonRDD(
parent: RDD[_],
func: PythonFunction,
preservePartitoning: Boolean)
preservePartitoning: Boolean,
isFromBarrier: Boolean = false)
extends RDD[Array[Byte]](parent) {

val bufferSize = conf.getInt("spark.buffer.size", 65536)
Expand All @@ -63,6 +64,9 @@ private[spark] class PythonRDD(
val runner = PythonRunner(func, bufferSize, reuseWorker)
runner.compute(firstParent.iterator(split, context), split.index, context)
}

@transient protected lazy override val isBarrier_ : Boolean =
isFromBarrier || dependencies.exists(_.rdd.isBarrier())
}

/**
Expand Down
1 change: 0 additions & 1 deletion core/src/main/scala/org/apache/spark/rdd/RDDBarrier.scala
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ package org.apache.spark.rdd

import scala.reflect.ClassTag

import org.apache.spark.BarrierTaskContext
import org.apache.spark.TaskContext
import org.apache.spark.annotation.{Experimental, Since}

Expand Down
55 changes: 53 additions & 2 deletions python/pyspark/rdd.py
Original file line number Diff line number Diff line change
Expand Up @@ -2406,6 +2406,26 @@ def toLocalIterator(self):
sock_info = self.ctx._jvm.PythonRDD.toLocalIteratorAndServe(self._jrdd.rdd())
return _load_from_socket(sock_info, self._jrdd_deserializer)

def barrier(self):
    """
    .. note:: Experimental

    Marks the current stage as a barrier stage, where Spark must launch all
    tasks of this stage together.

    :return: an :class:`RDDBarrier` instance that provides actions within a
        barrier stage.

    .. versionadded:: 2.4.0
    """
    return RDDBarrier(self)

def isBarrier(self):
    """
    .. note:: Experimental

    Whether this RDD is in a barrier stage.

    Delegates to the JVM-side ``RDD.isBarrier()`` of the wrapped Java RDD.

    .. versionadded:: 2.4.0
    """
    return self._jrdd.rdd().isBarrier()


def _prepare_for_python_RDD(sc, command):
# the serialized command will be compressed by broadcast
Expand All @@ -2429,6 +2449,33 @@ def _wrap_function(sc, func, deserializer, serializer, profiler=None):
sc.pythonVer, broadcast_vars, sc._javaAccumulator)


class RDDBarrier(object):

    """
    .. note:: Experimental

    Wraps an RDD in a barrier stage, which forces Spark to launch tasks of
    the stage containing this RDD together. :class:`RDDBarrier` instances are
    created by :func:`RDD.barrier`.

    .. versionadded:: 2.4.0
    """

    def __init__(self, rdd):
        # The wrapped RDD; transformations defined on this class produce
        # barrier-annotated descendants of it.
        self.rdd = rdd

    def mapPartitions(self, f, preservesPartitioning=False):
        """
        .. note:: Experimental

        Return a new RDD by applying a function to each partition of the
        wrapped RDD, where tasks are launched together in a barrier stage.

        :param f: a function applied to each partition's iterator of elements.
        :param preservesPartitioning: whether the partitioner of the parent
            RDD is kept (only meaningful for pair RDDs whose keys the
            function does not change).

        .. versionadded:: 2.4.0
        """
        def func(s, iterator):
            # Adapt the user function to the (split_index, iterator)
            # signature expected by PipelinedRDD.
            return f(iterator)
        # isFromBarrier=True marks the resulting pipelined RDD (and thus the
        # JVM-side PythonRDD it produces) as part of a barrier stage.
        return PipelinedRDD(self.rdd, func, preservesPartitioning, isFromBarrier=True)


class PipelinedRDD(RDD):

"""
Expand All @@ -2448,7 +2495,7 @@ class PipelinedRDD(RDD):
20
"""

def __init__(self, prev, func, preservesPartitioning=False):
def __init__(self, prev, func, preservesPartitioning=False, isFromBarrier=False):
if not isinstance(prev, PipelinedRDD) or not prev._is_pipelinable():
# This transformation is the first in its stage:
self.func = func
Expand All @@ -2474,6 +2521,7 @@ def pipeline_func(split, iterator):
self._jrdd_deserializer = self.ctx.serializer
self._bypass_serializer = False
self.partitioner = prev.partitioner if self.preservesPartitioning else None
self.is_barrier = prev.isBarrier() or isFromBarrier

def getNumPartitions(self):
    """Return the number of partitions of the parent (pre-pipeline) JVM RDD."""
    jvm_partitions = self._prev_jrdd.partitions()
    return jvm_partitions.size()
Expand All @@ -2493,7 +2541,7 @@ def _jrdd(self):
wrapped_func = _wrap_function(self.ctx, self.func, self._prev_jrdd_deserializer,
self._jrdd_deserializer, profiler)
python_rdd = self.ctx._jvm.PythonRDD(self._prev_jrdd.rdd(), wrapped_func,
self.preservesPartitioning)
self.preservesPartitioning, self.is_barrier)
self._jrdd_val = python_rdd.asJavaRDD()

if profiler:
Expand All @@ -2509,6 +2557,9 @@ def id(self):
def _is_pipelinable(self):
return not (self.is_cached or self.is_checkpointed)

def isBarrier(self):
    # True when this pipelined RDD was created via RDDBarrier.mapPartitions
    # (isFromBarrier=True) or when the upstream RDD it pipelines over is
    # itself a barrier RDD; the flag is computed once in __init__.
    return self.is_barrier


def _test():
import doctest
Expand Down