[SPARK-10079] [SPARKR] Make 'column' and 'col' functions be S4 functions.

1.  Add a "col" function into DataFrame.
2.  Move the current "col" function in Column.R to functions.R, convert it to S4 function.
3.  Add an S4 "column" function in functions.R.
4.  Convert the "column" function in Column.R to S4 function. This is for private use.

Author: Sun Rui <rui.sun@intel.com>

Closes #8864 from sun-rui/SPARK-10079.
This commit is contained in:
Sun Rui 2015-10-09 23:05:38 -07:00 committed by Shivaram Venkataraman
parent c1b4ce4326
commit 864de3bf40
5 changed files with 34 additions and 9 deletions

View file

@ -107,6 +107,7 @@ exportMethods("%in%",
              "cbrt",
              "ceil",
              "ceiling",
+             "column",
              "concat",
              "concat_ws",
              "contains",

View file

@ -36,13 +36,11 @@ setMethod("initialize", "Column", function(.Object, jc) {
   .Object
 })

-column <- function(jc) {
-  new("Column", jc)
-}
-
-col <- function(x) {
-  column(callJStatic("org.apache.spark.sql.functions", "col", x))
-}
+setMethod("column",
+          signature(x = "jobj"),
+          function(x) {
+            new("Column", x)
+          })

 #' @rdname show
 #' @name show

View file

@ -233,6 +233,28 @@ setMethod("ceil",
            column(jc)
          })
#' Internal helper: Scala's functions API provides a "col" function, but SparkR
#' does not export it to avoid clashing with base R's "col"; the exported
#' "column" function acts as the public alias of "col".
col <- function(x) {
  jc <- callJStatic("org.apache.spark.sql.functions", "col", x)
  column(jc)
}
#' column
#'
#' Returns a Column based on the given column name.
#'
#' @param x Character string: the name of the column.
#' @return A Column corresponding to the named column.
#' @rdname col
#' @name column
#' @family normal_funcs
#' @export
#' @examples \dontrun{column("name")}
setMethod("column",
          signature(x = "character"),
          function(x) {
            # Delegate to the internal (non-exported) col() helper.
            col(x)
          })
#' cos
#'
#' Computes the cosine of the given value.

View file

@ -686,6 +686,10 @@ setGeneric("cbrt", function(x) { standardGeneric("cbrt") })
#' @export
setGeneric("ceil", function(x) { standardGeneric("ceil") })
# S4 generic for creating a Column; methods are defined in this commit for
# "character" (public, a column name) and "jobj" (internal) signatures.
#' @rdname col
#' @export
setGeneric("column", function(x) { standardGeneric("column") })
#' @rdname concat
#' @export
setGeneric("concat", function(x, ...) { standardGeneric("concat") })

View file

@ -787,7 +787,7 @@ test_that("test HiveContext", {
 })

 test_that("column operators", {
-  c <- SparkR:::col("a")
+  c <- column("a")
   c2 <- (- c + 1 - 2) * 3 / 4.0
   c3 <- (c + c2 - c2) * c2 %% c2
   c4 <- (c > c2) & (c2 <= c3) | (c == c2) & (c2 != c3)
@ -795,7 +795,7 @@ test_that("column operators", {
 })

 test_that("column functions", {
-  c <- SparkR:::col("a")
+  c <- column("a")
   c1 <- abs(c) + acos(c) + approxCountDistinct(c) + ascii(c) + asin(c) + atan(c)
   c2 <- avg(c) + base64(c) + bin(c) + bitwiseNOT(c) + cbrt(c) + ceil(c) + cos(c)
   c3 <- cosh(c) + count(c) + crc32(c) + exp(c)