spark-instrumented-optimizer/R/pkg/inst/tests/test_client.R

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
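# Tests for generateSparkSubmitArgs() from client.R. Judging by the calls below,
# the third argument carries sparkJars and the fifth carries sparkPackages; both
# accept character vectors that should be joined into the comma-separated lists
# spark-submit expects (see SPARK-12019). The exact signature is assumed here
# from how the tests invoke it.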
context("functions in client.R")
test_that("adding spark-testing-base as a package works", {
args <- generateSparkSubmitArgs("", "", "", "",
"holdenk:spark-testing-base:1.3.0_0.0.5")
expect_equal(gsub("[[:space:]]", "", args),
gsub("[[:space:]]", "",
"--packages holdenk:spark-testing-base:1.3.0_0.0.5"))
})
test_that("no package specified doesn't add packages flag", {
args <- generateSparkSubmitArgs("", "", "", "", "")
expect_equal(gsub("[[:space:]]", "", args),
"")
})
test_that("multiple packages don't produce a warning", {
expect_that(generateSparkSubmitArgs("", "", "", "", c("A", "B")), not(gives_warning()))
})
test_that("sparkJars sparkPackages as character vectors", {
args <- generateSparkSubmitArgs("", "", c("one.jar", "two.jar", "three.jar"), "",
c("com.databricks:spark-avro_2.10:2.0.1",
"com.databricks:spark-csv_2.10:1.3.0"))
expect_match(args, "--jars one.jar,two.jar,three.jar")
expect_match(args,
"--packages com.databricks:spark-avro_2.10:2.0.1,com.databricks:spark-csv_2.10:1.3.0")
})