@@ -40,13 +40,16 @@ import spark.deploy.LocalSparkCluster
4040import spark .partial .ApproximateEvaluator
4141import spark .partial .PartialResult
4242import rdd .{CheckpointRDD , HadoopRDD , NewHadoopRDD , UnionRDD }
43- import scheduler .{ ResultTask , ShuffleMapTask , DAGScheduler , TaskScheduler }
43+ import scheduler ._
4444import spark .scheduler .local .LocalScheduler
4545import spark .scheduler .cluster .{SparkDeploySchedulerBackend , SchedulerBackend , ClusterScheduler }
4646import spark .scheduler .mesos .{CoarseMesosSchedulerBackend , MesosSchedulerBackend }
4747import storage .BlockManagerUI
48+ import storage .RDDInfo
49+ import storage .StorageStatus
4850import util .{MetadataCleaner , TimeStampedHashMap }
4951import storage .{StorageStatus , StorageUtils , RDDInfo }
52+ import scala .Some
5053
5154/**
5255 * Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
@@ -482,6 +485,10 @@ class SparkContext(
482485 StorageUtils .rddInfoFromStorageStatus(getSlavesStorageStatus, this )
483486 }
484487
// NOTE(review): the "NNN+" prefixes below are diff-gutter artifacts from the patch
// rendering, not part of the Scala source.
// Exposes the DAGScheduler's internal Stage -> StageInfo map directly (no copy is
// made here — presumably callers treat it as read-only; verify against DAGScheduler).
488+ def getStageInfo : Map [Stage ,StageInfo ] = {
489+ dagScheduler.stageToInfos
490+ }
491+
485492 /**
486493 * Return information about blocks stored in all of the slaves
487494 */
0 commit comments