Skip to content
Closed
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 23 additions & 21 deletions R/pkg/R/DataFrame.R
Original file line number Diff line number Diff line change
Expand Up @@ -431,7 +431,7 @@ setMethod("coltypes",
if (is.null(type)) {
specialtype <- specialtypeshandle(x)
if (is.null(specialtype)) {
stop(paste("Unsupported data type: ", x))
stop("Unsupported data type: ", x)
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Here, `paste` was inserting an extra space (its default separator, on top of the trailing space already in the string literal); passing the parts directly to `stop()`, which concatenates without a separator, fixes this.

}
type <- PRIMITIVE_TYPES[[specialtype]]
}
Expand Down Expand Up @@ -829,8 +829,11 @@ setMethod("repartitionByRange",
jcol <- lapply(cols, function(c) { c@jc })
sdf <- callJMethod(x@sdf, "repartitionByRange", numToInt(numPartitions), jcol)
} else {
stop(paste("numPartitions and col must be numeric and Column; however, got",
class(numPartitions), "and", class(col)))
stop(gettextf(
"numPartitions and col must be numeric and Column; however, got %s and %s",
class(numPartitions), class(col), domain = "R-SparkR"),
domain = NA
)
}
} else if (!is.null(col)) {
# only columns are specified
Expand All @@ -839,7 +842,7 @@ setMethod("repartitionByRange",
jcol <- lapply(cols, function(c) { c@jc })
sdf <- callJMethod(x@sdf, "repartitionByRange", jcol)
} else {
stop(paste("col must be Column; however, got", class(col)))
stop("col must be Column; however, got ", class(col))
}
} else if (!is.null(numPartitions)) {
# only numPartitions is specified
Expand Down Expand Up @@ -1068,10 +1071,10 @@ setMethod("sample",
signature(x = "SparkDataFrame"),
function(x, withReplacement = FALSE, fraction, seed) {
if (!is.numeric(fraction)) {
stop(paste("fraction must be numeric; however, got", class(fraction)))
stop("fraction must be numeric; however, got ", class(fraction))
}
if (!is.logical(withReplacement)) {
stop(paste("withReplacement must be logical; however, got", class(withReplacement)))
stop("withReplacement must be logical; however, got ", class(withReplacement))
}

if (!missing(seed)) {
Expand Down Expand Up @@ -1211,11 +1214,10 @@ setMethod("collect",
checkSchemaInArrow(schema(x))
TRUE
}, error = function(e) {
warning(paste0("The conversion from Spark DataFrame to R DataFrame was attempted ",
"with Arrow optimization because ",
"'spark.sql.execution.arrow.sparkr.enabled' is set to true; ",
"however, failed, attempting non-optimization. Reason: ",
e))
warning("The conversion from Spark DataFrame to R DataFrame was attempted ",
"with Arrow optimization because ",
"'spark.sql.execution.arrow.sparkr.enabled' is set to true; ",
"however, failed, attempting non-optimization. Reason: ", e)
FALSE
})
}
Expand Down Expand Up @@ -1513,8 +1515,8 @@ dapplyInternal <- function(x, func, schema) {
if (inherits(schema, "structType")) {
checkSchemaInArrow(schema)
} else if (is.null(schema)) {
stop(paste0("Arrow optimization does not support 'dapplyCollect' yet. Please disable ",
"Arrow optimization or use 'collect' and 'dapply' APIs instead."))
stop("Arrow optimization does not support 'dapplyCollect' yet. Please disable ",
"Arrow optimization or use 'collect' and 'dapply' APIs instead.")
} else {
stop("'schema' should be DDL-formatted string or structType.")
}
Expand Down Expand Up @@ -1995,8 +1997,8 @@ setMethod("[", signature(x = "SparkDataFrame"),
x
} else {
if (class(i) != "Column") {
stop(paste0("Expressions other than filtering predicates are not supported ",
"in the first parameter of extract operator [ or subset() method."))
stop("Expressions other than filtering predicates are not supported ",
"in the first parameter of extract operator [ or subset() method.")
}
filter(x, i)
}
Expand Down Expand Up @@ -2587,18 +2589,18 @@ setMethod("join",
if (is.null(joinType)) {
sdf <- callJMethod(x@sdf, "join", y@sdf, joinExpr@jc)
} else {
if (joinType %in% c("inner", "cross",
valid_join_types <- c("inner", "cross",
"outer", "full", "fullouter", "full_outer",
"left", "leftouter", "left_outer",
"right", "rightouter", "right_outer",
"semi", "left_semi", "leftsemi", "anti", "left_anti", "leftanti")) {
"semi", "left_semi", "leftsemi", "anti", "left_anti", "leftanti")
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Noting here that I changed this error message slightly: I removed the trailing "or" and aligned the ordering of the listed types ("leftsemi"/"left_semi" and "leftanti"/"left_anti" were swapped relative to the order in the `%in%` condition).

It may be preferable to simply change the test so it does not match the message text so exactly. Let me know which you prefer.

if (joinType %in% valid_join_types) {
joinType <- gsub("_", "", joinType)
sdf <- callJMethod(x@sdf, "join", y@sdf, joinExpr@jc, joinType)
} else {
stop(paste("joinType must be one of the following types:",
"'inner', 'cross', 'outer', 'full', 'fullouter', 'full_outer',",
"'left', 'leftouter', 'left_outer', 'right', 'rightouter', 'right_outer',",
"'semi', 'leftsemi', 'left_semi', 'anti', 'leftanti' or 'left_anti'."))
stop(gettextf("joinType must be one of the following types: '%s'",
paste(valid_join_types, collapse = "', '"), domain = "R-SparkR"),
domain = NA)
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion R/pkg/R/RDD.R
Original file line number Diff line number Diff line change
Expand Up @@ -947,7 +947,7 @@ setMethod("takeSample", signature(x = "RDD", withReplacement = "logical",
MAXINT <- .Machine$integer.max

if (num < 0)
stop(paste("Negative number of elements requested"))
stop("Negative number of elements requested")

if (initialCount > MAXINT - 1) {
maxSelected <- MAXINT - 1
Expand Down
20 changes: 11 additions & 9 deletions R/pkg/R/SQLContext.R
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ getInternalType <- function(x) {
Date = "date",
POSIXlt = "timestamp",
POSIXct = "timestamp",
stop(paste("Unsupported type for SparkDataFrame:", class(x))))
stop("Unsupported type for SparkDataFrame: ", class(x)))
}

#' return the SparkSession
Expand Down Expand Up @@ -111,9 +111,11 @@ sparkR.conf <- function(key, defaultValue) {
tryCatch(callJMethod(conf, "get", key),
error = function(e) {
if (any(grep("java.util.NoSuchElementException", as.character(e)))) {
stop(paste0("Config '", key, "' is not set"))
stop(gettextf("Config '%s' is not set",
key, domain = "R-SparkR"),
domain = NA)
} else {
stop(paste0("Unknown error: ", as.character(e)))
stop("Unknown error: ", as.character(e))
}
})
} else {
Expand Down Expand Up @@ -207,7 +209,8 @@ getSchema <- function(schema, firstRow = NULL, rdd = NULL) {
names <- lapply(names, function(n) {
nn <- gsub("[.]", "_", n)
if (nn != n) {
warning(paste("Use", nn, "instead of", n, "as column name"))
warning(gettextf("Use %s instead of %s as column name",
nn, n, domain = "R-SparkR"), domain = NA)
}
nn
})
Expand Down Expand Up @@ -289,10 +292,9 @@ createDataFrame <- function(data, schema = NULL, samplingRatio = 1.0,
TRUE
},
error = function(e) {
warning(paste0("createDataFrame attempted Arrow optimization because ",
"'spark.sql.execution.arrow.sparkr.enabled' is set to true; however, ",
"failed, attempting non-optimization. Reason: ",
e))
warning("createDataFrame attempted Arrow optimization because ",
"'spark.sql.execution.arrow.sparkr.enabled' is set to true; however, ",
"failed, attempting non-optimization. Reason: ", e)
FALSE
})
}
Expand Down Expand Up @@ -325,7 +327,7 @@ createDataFrame <- function(data, schema = NULL, samplingRatio = 1.0,
} else if (inherits(data, "RDD")) {
rdd <- data
} else {
stop(paste("unexpected type:", class(data)))
stop("unexpected type: ", class(data))
}

schema <- getSchema(schema, firstRow, rdd)
Expand Down
15 changes: 11 additions & 4 deletions R/pkg/R/client.R
Original file line number Diff line number Diff line change
Expand Up @@ -102,10 +102,17 @@ checkJavaVersion <- function() {
javaVersionNum <- as.integer(versions[1])
}
if (javaVersionNum < minJavaVersion || javaVersionNum >= maxJavaVersion) {
stop(paste0("Java version, greater than or equal to ", minJavaVersion,
" and less than ", maxJavaVersion,
", is required for this package; found version: ",
javaVersionStr))
stop(
gettextf(
"Java version, greater than or equal to %s and less than %s, ",
minJavaVersion, maxJavaVersion, domain = "R-SparkR"
),
gettextf(
"is required for this package; found version: %s",
javaVersionStr, domain = "R-SparkR"
),
domain = NA
)
}
return(javaVersionNum)
}
Expand Down
8 changes: 4 additions & 4 deletions R/pkg/R/context.R
Original file line number Diff line number Diff line change
Expand Up @@ -144,13 +144,13 @@ parallelize <- function(sc, coll, numSlices = 1) {
if ((!is.list(coll) && !is.vector(coll)) || is.data.frame(coll)) {
# nolint end
if (is.data.frame(coll)) {
message(paste("context.R: A data frame is parallelized by columns."))
message("context.R: A data frame is parallelized by columns.")
} else {
if (is.matrix(coll)) {
message(paste("context.R: A matrix is parallelized by elements."))
message("context.R: A matrix is parallelized by elements.")
} else {
message(paste("context.R: parallelize() currently only supports lists and vectors.",
"Calling as.list() to coerce coll into a list."))
message("context.R: parallelize() currently only supports lists and vectors. ",
"Calling as.list() to coerce coll into a list.")
}
}
coll <- as.list(coll)
Expand Down
2 changes: 1 addition & 1 deletion R/pkg/R/deserialize.R
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ readTypedObject <- function(con, type) {
"s" = readStruct(con),
"n" = NULL,
"j" = getJobj(readString(con)),
stop(paste("Unsupported type for deserialization", type)))
stop("Unsupported type for deserialization ", type))
}

readStringData <- function(con, len) {
Expand Down
4 changes: 2 additions & 2 deletions R/pkg/R/group.R
Original file line number Diff line number Diff line change
Expand Up @@ -234,8 +234,8 @@ gapplyInternal <- function(x, func, schema) {
if (inherits(schema, "structType")) {
checkSchemaInArrow(schema)
} else if (is.null(schema)) {
stop(paste0("Arrow optimization does not support 'gapplyCollect' yet. Please disable ",
"Arrow optimization or use 'collect' and 'gapply' APIs instead."))
stop("Arrow optimization does not support 'gapplyCollect' yet. Please disable ",
"Arrow optimization or use 'collect' and 'gapply' APIs instead.")
} else {
stop("'schema' should be DDL-formatted string or structType.")
}
Expand Down
Loading