# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

version: "{build}-{branch}"
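# A shallow clone fetches only a snapshot of the build commit instead of the
# full git history, which keeps the checkout step fast.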
shallow_clone: true

platform: x64
configuration: Debug

branches:
  only:
    - master
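# Build only commits that touch the files listed below; everything else on
# master is skipped. The patterns cover AppVeyor's own configuration plus the
# R-facing parts of the code base exercised by the SparkR tests.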
only_commits:
  files:
    - appveyor.yml
    - dev/appveyor-install-dependencies.ps1
    - R/
    - sql/core/src/main/scala/org/apache/spark/sql/api/r/
    - core/src/main/scala/org/apache/spark/api/r/
    - mllib/src/main/scala/org/apache/spark/ml/r/
    - core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
    - bin/*.cmd
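# Keep the local Maven repository between builds so dependencies are not
# re-downloaded on every run.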
cache:
  - C:\Users\appveyor\.m2
install:
  # Install Maven and dependencies
  - ps: .\dev\appveyor-install-dependencies.ps1
  # Required packages for R unit tests
  - cmd: R -e "install.packages(c('knitr', 'rmarkdown', 'testthat', 'e1071', 'survival', 'arrow'), repos='https://cloud.r-project.org/')"
  - cmd: R -e "packageVersion('knitr'); packageVersion('rmarkdown'); packageVersion('testthat'); packageVersion('e1071'); packageVersion('survival'); packageVersion('arrow')"
build_script:
  # '-Djna.nosys=true' is required to avoid a kernel32.dll load failure.
  # See SPARK-28759.
  # Ideally we should also check the Hive-related tests in SparkR (SPARK-31745).
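  # '-DskipTests' only skips test execution during packaging; the SparkR tests
  # still run in the test_script step below. '-Psparkr' activates the Maven
  # profile that builds the SparkR package.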
  - cmd: mvn -DskipTests -Psparkr -Djna.nosys=true package

environment:
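  # NOT_CRAN=true makes testthat run tests that are otherwise skipped on CRAN
  # via skip_on_cran().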
  NOT_CRAN: true
  # See SPARK-27848. Currently, installing some dependent packages causes
  # "(converted from warning) unable to identify current timezone 'C':" for an
  # unknown reason. This environment variable works around the warning so that
  # SparkR can be tested against a higher version.
  R_REMOTES_NO_ERRORS_FROM_WARNINGS: true

test_script:
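  # '%CD:\=/%' expands the current directory with backslashes replaced by
  # forward slashes, so the log4j configuration can be passed as a file:/// URI.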
  - cmd: .\bin\spark-submit2.cmd --driver-java-options "-Dlog4j.configuration=file:///%CD:\=/%/R/log4j.properties" --conf spark.hadoop.fs.defaultFS="file:///" R\pkg\tests\run-all.R
notifications:
  - provider: Email
    on_build_success: false
    on_build_failure: false
    on_build_status_changed: false