diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE index 255be2e76f..95d949ee3e 100644 --- a/R/pkg/NAMESPACE +++ b/R/pkg/NAMESPACE @@ -107,6 +107,7 @@ exportMethods("%in%", "cbrt", "ceil", "ceiling", + "column", "concat", "concat_ws", "contains", diff --git a/R/pkg/R/column.R b/R/pkg/R/column.R index 42e9d12179..20de3907b7 100644 --- a/R/pkg/R/column.R +++ b/R/pkg/R/column.R @@ -36,13 +36,11 @@ setMethod("initialize", "Column", function(.Object, jc) { .Object }) -column <- function(jc) { - new("Column", jc) - } - -col <- function(x) { - column(callJStatic("org.apache.spark.sql.functions", "col", x)) - } +setMethod("column", + signature(x = "jobj"), + function(x) { + new("Column", x) + }) #' @rdname show #' @name show diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R index 94687edb05..a220ad8b9f 100644 --- a/R/pkg/R/functions.R +++ b/R/pkg/R/functions.R @@ -233,6 +233,28 @@ setMethod("ceil", column(jc) }) +#' Though Scala's functions object has a "col" function, we don't expose it in SparkR +#' because we don't want to conflict with the "col" function in the R base +#' package and we also have the "column" function exported, which is an alias of "col". +col <- function(x) { + column(callJStatic("org.apache.spark.sql.functions", "col", x)) +} + +#' column +#' +#' Returns a Column based on the given column name. +#' +#' @rdname col +#' @name column +#' @family normal_funcs +#' @export +#' @examples \dontrun{column("df")} +setMethod("column", + signature(x = "character"), + function(x) { + col(x) + }) + +#' cos +#' +#' Computes the cosine of the given value. 
diff --git a/R/pkg/R/generics.R b/R/pkg/R/generics.R index c447413180..8fad17026c 100644 --- a/R/pkg/R/generics.R +++ b/R/pkg/R/generics.R @@ -686,6 +686,10 @@ setGeneric("cbrt", function(x) { standardGeneric("cbrt") }) #' @export setGeneric("ceil", function(x) { standardGeneric("ceil") }) +#' @rdname col +#' @export +setGeneric("column", function(x) { standardGeneric("column") }) + #' @rdname concat #' @export setGeneric("concat", function(x, ...) { standardGeneric("concat") }) diff --git a/R/pkg/inst/tests/test_sparkSQL.R b/R/pkg/inst/tests/test_sparkSQL.R index 4804ecf177..3a04edbb4c 100644 --- a/R/pkg/inst/tests/test_sparkSQL.R +++ b/R/pkg/inst/tests/test_sparkSQL.R @@ -787,7 +787,7 @@ test_that("test HiveContext", { }) test_that("column operators", { - c <- SparkR:::col("a") + c <- column("a") c2 <- (- c + 1 - 2) * 3 / 4.0 c3 <- (c + c2 - c2) * c2 %% c2 c4 <- (c > c2) & (c2 <= c3) | (c == c2) & (c2 != c3) @@ -795,7 +795,7 @@ test_that("column operators", { }) test_that("column functions", { - c <- SparkR:::col("a") + c <- column("a") c1 <- abs(c) + acos(c) + approxCountDistinct(c) + ascii(c) + asin(c) + atan(c) c2 <- avg(c) + base64(c) + bin(c) + bitwiseNOT(c) + cbrt(c) + ceil(c) + cos(c) c3 <- cosh(c) + count(c) + crc32(c) + exp(c)