# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

version: "{build}-{branch}"

shallow_clone: true

platform: x64
configuration: Debug

branches:
  only:
    - master

only_commits:
  files:
    - appveyor.yml
    - dev/appveyor-install-dependencies.ps1
    - R/
    - sql/core/src/main/scala/org/apache/spark/sql/api/r/
    - core/src/main/scala/org/apache/spark/api/r/
    - mllib/src/main/scala/org/apache/spark/ml/r/
    - core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
    - bin/*.cmd

cache:
  - C:\Users\appveyor\.m2

install:
  # Install Maven and dependencies
  - ps: .\dev\appveyor-install-dependencies.ps1
  # Required packages for R unit tests
  - cmd: R -e "install.packages(c('knitr', 'rmarkdown', 'testthat', 'e1071', 'survival', 'arrow'), repos='https://cloud.r-project.org/')"
  - cmd: R -e "packageVersion('knitr'); packageVersion('rmarkdown'); packageVersion('testthat'); packageVersion('e1071'); packageVersion('survival'); packageVersion('arrow')"

build_script:
  # '-Djna.nosys=true' is required to avoid a kernel32.dll load failure.
  # See SPARK-28759.
  # Ideally we should check the Hive-related tests in SparkR as well (SPARK-31745).
  - cmd: mvn -DskipTests -Psparkr -Djna.nosys=true package

environment:
  NOT_CRAN: true
  # See SPARK-27848. Currently, installing some dependent packages causes
  # "(converted from warning) unable to identify current timezone 'C':" for an unknown reason.
  # This environment variable works around the issue so SparkR can be tested against a higher version.
  R_REMOTES_NO_ERRORS_FROM_WARNINGS: true

test_script:
  - cmd: .\bin\spark-submit2.cmd --driver-java-options "-Dlog4j.configuration=file:///%CD:\=/%/R/log4j.properties" --conf spark.hadoop.fs.defaultFS="file:///" R\pkg\tests\run-all.R

notifications:
  - provider: Email
    on_build_success: false
    on_build_failure: false
    on_build_status_changed: false