dc4c351837
## What changes were proposed in this pull request?

Move all existing tests to a non-installed directory so that they never run when the SparkR package is installed.

For a follow-up PR:
- remove all skip_on_cran() calls in tests
- clean up the test timer
- improve or change the basic tests that do run on CRAN (if anyone has a suggestion)

It looks like `R CMD build pkg` will still put pkg/tests (i.e. the full tests) into the source package, but `R CMD INSTALL` on such a source package does not install these tests (and so `R CMD check` does not run them); see the sketch below.

## How was this patch tested?

- [x] unit tests, Jenkins
- [x] AppVeyor
- [x] make a source package, install it, `R CMD check` it, and verify the full tests are not installed or run

Author: Felix Cheung <felixcheung_m@hotmail.com>

Closes #18264 from felixcheung/rtestset.
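A minimal sketch of that build/install/check flow, assuming the commands are run from the SparkR source tree; the tarball name below is illustrative, since `R CMD build` names its output `<package>_<version>.tar.gz`:

```sh
# Build a source package from the package directory; per the note above,
# R CMD build still bundles pkg/tests (the full suite) into the tarball.
R CMD build pkg

# Installing from that source package does not install the bundled tests,
# so R CMD check does not run them. (SparkR_2.3.0.tar.gz is a placeholder
# for whatever tarball R CMD build actually produced.)
R CMD INSTALL SparkR_2.3.0.tar.gz
R CMD check SparkR_2.3.0.tar.gz
```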
56 lines | 1.8 KiB | R
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

context("broadcast variables")

# JavaSparkContext handle
sparkSession <- sparkR.session(master = sparkRTestMaster, enableHiveSupport = FALSE)
sc <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "getJavaSparkContext", sparkSession)

# Partitioned data
nums <- 1:2
rrdd <- parallelize(sc, nums, 2L)
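
# The two tests below contrast an explicit broadcast variable
# (broadcastRDD + value) with plain closure capture of the matrix;
# both paths should compute the same per-partition sums.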
test_that("using broadcast variable", {
|
|
skip_on_cran()
|
|
|
|
randomMat <- matrix(nrow = 10, ncol = 10, data = rnorm(100))
|
|
randomMatBr <- broadcastRDD(sc, randomMat)
|
|
|
|
useBroadcast <- function(x) {
|
|
sum(SparkR:::value(randomMatBr) * x)
|
|
}
|
|
actual <- collectRDD(lapply(rrdd, useBroadcast))
|
|
expected <- list(sum(randomMat) * 1, sum(randomMat) * 2)
|
|
expect_equal(actual, expected)
|
|
})
|
|
|
|
test_that("without using broadcast variable", {
|
|
skip_on_cran()
|
|
|
|
randomMat <- matrix(nrow = 10, ncol = 10, data = rnorm(100))
|
|
|
|
useBroadcast <- function(x) {
|
|
sum(randomMat * x)
|
|
}
|
|
actual <- collectRDD(lapply(rrdd, useBroadcast))
|
|
expected <- list(sum(randomMat) * 1, sum(randomMat) * 2)
|
|
expect_equal(actual, expected)
|
|
})
|
|
|
|
sparkR.session.stop()
|