
Commit 34f9aa5

getNumPartitions

1 parent 3c2ba9f

3 files changed (+31, -5 lines)


R/pkg/NAMESPACE
Lines changed: 1 addition & 0 deletions

@@ -94,6 +94,7 @@ exportMethods("arrange",
               "freqItems",
               "gapply",
               "gapplyCollect",
+              "getNumPartitions",
               "group_by",
               "groupBy",
               "head",

R/pkg/R/DataFrame.R
Lines changed: 25 additions & 0 deletions

@@ -3406,3 +3406,28 @@ setMethod("randomSplit",
             }
             sapply(sdfs, dataFrame)
           })
+
+#' getNumPartitions
+#'
+#' Return the number of partitions
+#' Note: in order to compute the number of partitions, the SparkDataFrame has to be
+#' converted into an RDD temporarily, internally.
+#'
+#' @param x A SparkDataFrame
+#' @family SparkDataFrame functions
+#' @aliases getNumPartitions,SparkDataFrame-method
+#' @rdname getNumPartitions
+#' @name getNumPartitions
+#' @export
+#' @examples
+#' \dontrun{
+#' sparkR.session()
+#' df <- createDataFrame(cars, numPartitions = 2)
+#' getNumPartitions(df)
+#' }
+#' @note getNumPartitions since 2.1.1
+setMethod("getNumPartitions",
+          signature(x = "SparkDataFrame"),
+          function(x) {
+            getNumPartitions(toRDD(x))
+          })
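
With the method exported, the partition count of a SparkDataFrame can be read directly instead of converting to an RDD by hand. A short usage sketch, assuming a local session (repartition() is the existing SparkR API for changing the partition count):

  library(SparkR)
  sparkR.session(master = "local[2]")        # local session, for illustration only

  df <- createDataFrame(cars, numPartitions = 2)
  getNumPartitions(df)                       # 2

  df2 <- repartition(df, numPartitions = 4)  # shuffles the data into 4 partitions
  getNumPartitions(df2)                      # 4

  sparkR.session.stop()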

R/pkg/inst/tests/testthat/test_sparkSQL.R
Lines changed: 5 additions & 5 deletions

@@ -196,18 +196,18 @@ test_that("create DataFrame from RDD", {
   expect_equal(dtypes(df), list(c("name", "string"), c("age", "int"), c("height", "float")))
   expect_equal(as.list(collect(where(df, df$name == "John"))),
                list(name = "John", age = 19L, height = 176.5))
-  expect_equal(getNumPartitions(toRDD(df)), 1)
+  expect_equal(getNumPartitions(df), 1)

   df <- as.DataFrame(cars, numPartitions = 2)
-  expect_equal(getNumPartitions(toRDD(df)), 2)
+  expect_equal(getNumPartitions(df), 2)
   df <- createDataFrame(cars, numPartitions = 3)
-  expect_equal(getNumPartitions(toRDD(df)), 3)
+  expect_equal(getNumPartitions(df), 3)
   # validate limit by num of rows
   df <- createDataFrame(cars, numPartitions = 60)
-  expect_equal(getNumPartitions(toRDD(df)), 50)
+  expect_equal(getNumPartitions(df), 50)
   # validate when 1 < (length(coll) / numSlices) << length(coll)
   df <- createDataFrame(cars, numPartitions = 20)
-  expect_equal(getNumPartitions(toRDD(df)), 20)
+  expect_equal(getNumPartitions(df), 20)

   df <- as.DataFrame(data.frame(0))
   expect_is(df, "SparkDataFrame")
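
The assertion under "validate limit by num of rows" relies on createDataFrame capping the partition count at the number of rows: the base R cars dataset has 50 rows, so requesting 60 partitions yields 50. A quick way to see this interactively, assuming a running local session:

  library(SparkR)
  sparkR.session(master = "local[2]")

  nrow(cars)                                       # 50 rows in the base R dataset
  df <- createDataFrame(cars, numPartitions = 60)  # more slices requested than rows
  getNumPartitions(df)                             # 50: at most one row per partition

  sparkR.session.stop()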
