f352cef077
### What changes were proposed in this pull request? This PR targets to remove Hive profile in SparkR build at AppVeyor in order to: - Speed up the build. Currently, SparkR build is [reaching the time limit](https://ci.appveyor.com/project/ApacheSoftwareFoundation/spark/builds/32853533). - Clean up the build profile. ### Why are the changes needed? We're hitting a time limit issue again and this PR could reduce the build time. Seems like we're [already skipping Hive related tests in SparkR](https://ci.appveyor.com/project/ApacheSoftwareFoundation/spark/builds/32853533) for some reasons, see below: ``` test_sparkSQL.R:307: skip: create DataFrame from RDD Reason: Hive is not build with SparkSQL, skipped test_sparkSQL.R:1341: skip: test HiveContext Reason: Hive is not build with SparkSQL, skipped test_sparkSQL.R:2813: skip: read/write ORC files Reason: Hive is not build with SparkSQL, skipped test_sparkSQL.R:2834: skip: read/write ORC files - compression option Reason: Hive is not build with SparkSQL, skipped test_sparkSQL.R:3727: skip: enableHiveSupport on SparkSession Reason: Hive is not build with SparkSQL, skipped ``` Although we build with Hive profile. So, the Hive profile is useless here. ### Does this PR introduce _any_ user-facing change? No, dev-only. ### How was this patch tested? AppVeyor will test it out. Closes #28564 from HyukjinKwon/SPARK-31744. Authored-by: HyukjinKwon <gurwls223@apache.org> Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
69 lines
2.6 KiB
YAML
69 lines
2.6 KiB
YAML
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# AppVeyor CI configuration for building Spark and running the SparkR tests
# on Windows.
---
version: "{build}-{branch}"

shallow_clone: true

platform: x64
configuration: Debug

# Only build the master branch.
branches:
  only:
    - master

# Only trigger builds for commits that touch SparkR or its Windows tooling.
only_commits:
  files:
    - appveyor.yml
    - dev/appveyor-install-dependencies.ps1
    - R/
    - sql/core/src/main/scala/org/apache/spark/sql/api/r/
    - core/src/main/scala/org/apache/spark/api/r/
    - mllib/src/main/scala/org/apache/spark/ml/r/
    - core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
    - bin/*.cmd

# Cache the local Maven repository between builds to speed them up.
cache:
  - C:\Users\appveyor\.m2

install:
  # Install maven and dependencies
  - ps: .\dev\appveyor-install-dependencies.ps1
  # Required package for R unit tests
  - cmd: R -e "install.packages(c('knitr', 'rmarkdown', 'testthat', 'e1071', 'survival', 'arrow'), repos='https://cloud.r-project.org/')"
  - cmd: R -e "packageVersion('knitr'); packageVersion('rmarkdown'); packageVersion('testthat'); packageVersion('e1071'); packageVersion('survival'); packageVersion('arrow')"

build_script:
  # '-Djna.nosys=true' is required to avoid kernel32.dll load failure.
  # See SPARK-28759.
  # Ideally we should check the tests related to Hive in SparkR as well (SPARK-31745).
  - cmd: mvn -DskipTests -Psparkr -Djna.nosys=true package

environment:
  NOT_CRAN: true
  # See SPARK-27848. Currently installing some dependent packages causes
  # "(converted from warning) unable to identify current timezone 'C':" for an unknown reason.
  # This environment variable works around to test SparkR against a higher version.
  R_REMOTES_NO_ERRORS_FROM_WARNINGS: true

test_script:
  - cmd: .\bin\spark-submit2.cmd --driver-java-options "-Dlog4j.configuration=file:///%CD:\=/%/R/log4j.properties" --conf spark.hadoop.fs.defaultFS="file:///" R\pkg\tests\run-all.R

# Suppress all build-status email notifications.
notifications:
  - provider: Email
    on_build_success: false
    on_build_failure: false
    on_build_status_changed: false