[SPARK-7224] [SPARK-7306] mock repository generator for --packages tests without nio.Path

The previous PR for SPARK-7224 (#5790) broke JDK 6 builds, because it used java.nio.Path, which was introduced in JDK 7 and is not available in JDK 6. This PR uses Guava's `Files` for directory creation and related file handling instead.
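
For context, a minimal sketch of the swap (the `java.nio` call in the comment is illustrative; the two Guava calls are the ones the new test utility leans on):

    import java.io.File
    import com.google.common.io.Files

    // JDK 7+ only (the approach in #5790):
    //   java.nio.file.Files.createDirectories(java.nio.file.Paths.get(root, "a", "b"))
    // JDK 6-compatible Guava equivalents:
    val tempDir: File = Files.createTempDir()              // fresh temp directory
    Files.createParentDirs(new File(tempDir, "a/b/dummy")) // creates a/b; "dummy" itself is not created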

The description from the previous PR:
> This patch contains an `IvyTestUtils` file, which dynamically generates jars and pom files to test the `--packages` feature without having to rely on the internet, and Maven Central.
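
In practice the suites below use it roughly like this (a sketch based on the test changes in this commit; the `isTest = true` parameter name follows the resolver code in the diff):

    import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate

    val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
    // Generates a jar and pom under a temp dir, runs the body against the
    // repository's URI, then cleans up.
    IvyTestUtils.withRepository(main, dependencies = None, rootDir = None) { repo =>
      val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
        main.toString, Option(repo), None, isTest = true)
      assert(jarPath.indexOf("mylib") >= 0)
    }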

cc pwendell

I also ran the flaky test about 20 times locally; it didn't fail a single time, but I think it may fail maybe once every 100 builds. I still haven't figured out the cause yet, but the test before it, `--jars`, was also failing after we turned off the `--packages` test in `SparkSubmitSuite`. It may be related to the launch of SparkSubmit.

Author: Burak Yavuz <brkyvz@gmail.com>

Closes #5892 from brkyvz/maven-utils and squashes the following commits:

e9b1903 [Burak Yavuz] fix merge conflict
68214e0 [Burak Yavuz] remove ignore for test(neglect spark dependencies)
e632381 [Burak Yavuz] fix ignore
9ef1408 [Burak Yavuz] re-enable --packages test
22eea62 [Burak Yavuz] Merge branch 'master' of github.com:apache/spark into maven-utils
05cd0de [Burak Yavuz] added mock repository generator

(cherry picked from commit 8014e1f6bb)
Signed-off-by: Josh Rosen <joshrosen@databricks.com>
Burak Yavuz 2015-05-22 17:48:09 -07:00 committed by Josh Rosen
parent 130ec219aa
commit 17a51c8879
5 changed files with 406 additions and 102 deletions


@@ -105,23 +105,18 @@ private[spark] object TestUtils {
URI.create(s"string:///${name.replace(".", "/")}${SOURCE.extension}")
}
private class JavaSourceFromString(val name: String, val code: String)
private[spark] class JavaSourceFromString(val name: String, val code: String)
extends SimpleJavaFileObject(createURI(name), SOURCE) {
override def getCharContent(ignoreEncodingErrors: Boolean): String = code
}
/** Creates a compiled class with the given name. Class file will be placed in destDir. */
/** Creates a compiled class with the source file. Class file will be placed in destDir. */
def createCompiledClass(
className: String,
destDir: File,
toStringValue: String = "",
baseClass: String = null,
classpathUrls: Seq[URL] = Seq()): File = {
sourceFile: JavaSourceFromString,
classpathUrls: Seq[URL]): File = {
val compiler = ToolProvider.getSystemJavaCompiler
val extendsText = Option(baseClass).map { c => s" extends ${c}" }.getOrElse("")
val sourceFile = new JavaSourceFromString(className,
"public class " + className + extendsText + " implements java.io.Serializable {" +
" @Override public String toString() { return \"" + toStringValue + "\"; }}")
// Calling this outputs a class file in pwd. It's easier to just rename the file than
// build a custom FileManager that controls the output location.
@@ -144,4 +139,18 @@ private[spark] object TestUtils {
assert(out.exists(), "Destination file not moved: " + out.getAbsolutePath())
out
}
/** Creates a compiled class with the given name. Class file will be placed in destDir. */
def createCompiledClass(
className: String,
destDir: File,
toStringValue: String = "",
baseClass: String = null,
classpathUrls: Seq[URL] = Seq()): File = {
val extendsText = Option(baseClass).map { c => s" extends ${c}" }.getOrElse("")
val sourceFile = new JavaSourceFromString(className,
"public class " + className + extendsText + " implements java.io.Serializable {" +
" @Override public String toString() { return \"" + toStringValue + "\"; }}")
createCompiledClass(className, destDir, sourceFile, classpathUrls)
}
}
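
The refactor above splits `createCompiledClass` in two: a new overload that compiles a caller-supplied `JavaSourceFromString` (which `IvyTestUtils` needs for classes that live in a package), plus the old signature kept as a thin wrapper that generates the source itself. A sketch of both call styles (`destDir` is a hypothetical output directory):

    import java.io.File
    import org.apache.spark.TestUtils.{createCompiledClass, JavaSourceFromString}

    val destDir = new File("/tmp/test-classes") // hypothetical
    // Old convenience form: the Java source is generated from the arguments.
    createCompiledClass("Foo", destDir, toStringValue = "foo")
    // New form: the caller supplies the source directly.
    val src = new JavaSourceFromString("Bar",
      "public class Bar implements java.io.Serializable {}")
    createCompiledClass("Bar", destDir, src, Seq.empty)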


@@ -753,7 +753,9 @@ private[spark] object SparkSubmitUtils {
* @param artifactId the artifactId of the coordinate
* @param version the version of the coordinate
*/
private[deploy] case class MavenCoordinate(groupId: String, artifactId: String, version: String)
private[deploy] case class MavenCoordinate(groupId: String, artifactId: String, version: String) {
override def toString: String = s"$groupId:$artifactId:$version"
}
/**
* Extracts maven coordinates from a comma-delimited string. Coordinates should be provided
@@ -776,6 +778,10 @@ private[spark] object SparkSubmitUtils {
}
}
/** Path of the local Maven cache. */
private[spark] def m2Path: File = new File(System.getProperty("user.home"),
".m2" + File.separator + "repository" + File.separator)
/**
* Extracts maven coordinates from a comma-delimited string
* @param remoteRepos Comma-delimited string of remote repositories
@@ -789,8 +795,7 @@ private[spark] object SparkSubmitUtils {
val localM2 = new IBiblioResolver
localM2.setM2compatible(true)
val m2Path = ".m2" + File.separator + "repository" + File.separator
localM2.setRoot(new File(System.getProperty("user.home"), m2Path).toURI.toString)
localM2.setRoot(m2Path.toURI.toString)
localM2.setUsepoms(true)
localM2.setName("local-m2-cache")
cr.add(localM2)
@@ -915,69 +920,72 @@ private[spark] object SparkSubmitUtils {
""
} else {
val sysOut = System.out
// To prevent ivy from logging to system out
System.setOut(printStream)
val artifacts = extractMavenCoordinates(coordinates)
// Default configuration name for ivy
val ivyConfName = "default"
// set ivy settings for location of cache
val ivySettings: IvySettings = new IvySettings
// Directories for caching downloads through ivy and storing the jars when maven coordinates
// are supplied to spark-submit
val alternateIvyCache = ivyPath.getOrElse("")
val packagesDirectory: File =
if (alternateIvyCache.trim.isEmpty) {
new File(ivySettings.getDefaultIvyUserDir, "jars")
try {
// To prevent ivy from logging to system out
System.setOut(printStream)
val artifacts = extractMavenCoordinates(coordinates)
// Default configuration name for ivy
val ivyConfName = "default"
// set ivy settings for location of cache
val ivySettings: IvySettings = new IvySettings
// Directories for caching downloads through ivy and storing the jars when maven coordinates
// are supplied to spark-submit
val alternateIvyCache = ivyPath.getOrElse("")
val packagesDirectory: File =
if (alternateIvyCache.trim.isEmpty) {
new File(ivySettings.getDefaultIvyUserDir, "jars")
} else {
ivySettings.setDefaultIvyUserDir(new File(alternateIvyCache))
ivySettings.setDefaultCache(new File(alternateIvyCache, "cache"))
new File(alternateIvyCache, "jars")
}
printStream.println(
s"Ivy Default Cache set to: ${ivySettings.getDefaultCache.getAbsolutePath}")
printStream.println(s"The jars for the packages stored in: $packagesDirectory")
// create a pattern matcher
ivySettings.addMatcher(new GlobPatternMatcher)
// create the dependency resolvers
val repoResolver = createRepoResolvers(remoteRepos, ivySettings)
ivySettings.addResolver(repoResolver)
ivySettings.setDefaultResolver(repoResolver.getName)
val ivy = Ivy.newInstance(ivySettings)
// Set resolve options to download transitive dependencies as well
val resolveOptions = new ResolveOptions
resolveOptions.setTransitive(true)
val retrieveOptions = new RetrieveOptions
// Turn downloading and logging off for testing
if (isTest) {
resolveOptions.setDownload(false)
resolveOptions.setLog(LogOptions.LOG_QUIET)
retrieveOptions.setLog(LogOptions.LOG_QUIET)
} else {
ivySettings.setDefaultIvyUserDir(new File(alternateIvyCache))
ivySettings.setDefaultCache(new File(alternateIvyCache, "cache"))
new File(alternateIvyCache, "jars")
resolveOptions.setDownload(true)
}
printStream.println(
s"Ivy Default Cache set to: ${ivySettings.getDefaultCache.getAbsolutePath}")
printStream.println(s"The jars for the packages stored in: $packagesDirectory")
// create a pattern matcher
ivySettings.addMatcher(new GlobPatternMatcher)
// create the dependency resolvers
val repoResolver = createRepoResolvers(remoteRepos, ivySettings)
ivySettings.addResolver(repoResolver)
ivySettings.setDefaultResolver(repoResolver.getName)
val ivy = Ivy.newInstance(ivySettings)
// Set resolve options to download transitive dependencies as well
val resolveOptions = new ResolveOptions
resolveOptions.setTransitive(true)
val retrieveOptions = new RetrieveOptions
// Turn downloading and logging off for testing
if (isTest) {
resolveOptions.setDownload(false)
resolveOptions.setLog(LogOptions.LOG_QUIET)
retrieveOptions.setLog(LogOptions.LOG_QUIET)
} else {
resolveOptions.setDownload(true)
// A Module descriptor must be specified. Entries are dummy strings
val md = getModuleDescriptor
md.setDefaultConf(ivyConfName)
// Add exclusion rules for Spark and Scala Library
addExclusionRules(ivySettings, ivyConfName, md)
// add all supplied maven artifacts as dependencies
addDependenciesToIvy(md, artifacts, ivyConfName)
// resolve dependencies
val rr: ResolveReport = ivy.resolve(md, resolveOptions)
if (rr.hasError) {
throw new RuntimeException(rr.getAllProblemMessages.toString)
}
// retrieve all resolved dependencies
ivy.retrieve(rr.getModuleDescriptor.getModuleRevisionId,
packagesDirectory.getAbsolutePath + File.separator +
"[organization]_[artifact]-[revision].[ext]",
retrieveOptions.setConfs(Array(ivyConfName)))
resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
} finally {
System.setOut(sysOut)
}
// A Module descriptor must be specified. Entries are dummy strings
val md = getModuleDescriptor
md.setDefaultConf(ivyConfName)
// Add exclusion rules for Spark and Scala Library
addExclusionRules(ivySettings, ivyConfName, md)
// add all supplied maven artifacts as dependencies
addDependenciesToIvy(md, artifacts, ivyConfName)
// resolve dependencies
val rr: ResolveReport = ivy.resolve(md, resolveOptions)
if (rr.hasError) {
throw new RuntimeException(rr.getAllProblemMessages.toString)
}
// retrieve all resolved dependencies
ivy.retrieve(rr.getModuleDescriptor.getModuleRevisionId,
packagesDirectory.getAbsolutePath + File.separator +
"[organization]_[artifact]-[revision].[ext]",
retrieveOptions.setConfs(Array(ivyConfName)))
System.setOut(sysOut)
resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
}
}
}
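
Aside from the resolver test hooks, the behavioral fix in this hunk is that `System.out` is now restored in a `finally` block, so a failed resolution no longer leaves stdout permanently redirected to Ivy's log stream. Distilled to its essence (this helper is illustrative, not code from the patch):

    import java.io.PrintStream

    def withSystemOut[T](replacement: PrintStream)(body: => T): T = {
      val saved = System.out
      System.setOut(replacement)
      try body finally System.setOut(saved) // restored even if body throws
    }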


@@ -0,0 +1,261 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io.{File, FileInputStream, FileOutputStream}
import java.util.jar.{JarEntry, JarOutputStream}
import com.google.common.io.{Files, ByteStreams}
import org.apache.commons.io.FileUtils
import org.apache.spark.TestUtils.{createCompiledClass, JavaSourceFromString}
import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
private[deploy] object IvyTestUtils {
/**
* Create the path for the jar and pom from the maven coordinate. Extension should be `jar`
* or `pom`.
*/
private def pathFromCoordinate(
artifact: MavenCoordinate,
prefix: File,
ext: String,
useIvyLayout: Boolean): File = {
val groupDirs = artifact.groupId.replace(".", File.separator)
val artifactDirs = artifact.artifactId
val artifactPath =
if (!useIvyLayout) {
Seq(groupDirs, artifactDirs, artifact.version).mkString(File.separator)
} else {
Seq(groupDirs, artifactDirs, artifact.version, ext + "s").mkString(File.separator)
}
new File(prefix, artifactPath)
}
private def artifactName(artifact: MavenCoordinate, ext: String = ".jar"): String = {
s"${artifact.artifactId}-${artifact.version}$ext"
}
/** Write the contents to a file in the supplied directory. */
private def writeFile(dir: File, fileName: String, contents: String): File = {
val outputFile = new File(dir, fileName)
val outputStream = new FileOutputStream(outputFile)
outputStream.write(contents.toCharArray.map(_.toByte))
outputStream.close()
outputFile
}
/** Create an example Python file. */
private def createPythonFile(dir: File): File = {
val contents =
"""def myfunc(x):
| return x + 1
""".stripMargin
writeFile(dir, "mylib.py", contents)
}
/** Create a simple testable Class. */
private def createJavaClass(dir: File, className: String, packageName: String): File = {
val contents =
s"""package $packageName;
|
|import java.lang.Integer;
|
|class $className implements java.io.Serializable {
|
| public $className() {}
|
| public Integer myFunc(Integer x) {
| return x + 1;
| }
|}
""".stripMargin
val sourceFile =
new JavaSourceFromString(new File(dir, className + ".java").getAbsolutePath, contents)
createCompiledClass(className, dir, sourceFile, Seq.empty)
}
/** Helper method to write artifact information in the pom. */
private def pomArtifactWriter(artifact: MavenCoordinate, tabCount: Int = 1): String = {
var result = "\n" + " " * tabCount + s"<groupId>${artifact.groupId}</groupId>"
result += "\n" + " " * tabCount + s"<artifactId>${artifact.artifactId}</artifactId>"
result += "\n" + " " * tabCount + s"<version>${artifact.version}</version>"
result
}
/** Create a pom file for this artifact. */
private def createPom(
dir: File,
artifact: MavenCoordinate,
dependencies: Option[Seq[MavenCoordinate]]): File = {
var content = """
|<?xml version="1.0" encoding="UTF-8"?>
|<project xmlns="http://maven.apache.org/POM/4.0.0"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
| http://maven.apache.org/xsd/maven-4.0.0.xsd">
| <modelVersion>4.0.0</modelVersion>
""".stripMargin.trim
content += pomArtifactWriter(artifact)
content += dependencies.map { deps =>
val inside = deps.map { dep =>
"\t<dependency>" + pomArtifactWriter(dep, 3) + "\n\t</dependency>"
}.mkString("\n")
"\n <dependencies>\n" + inside + "\n </dependencies>"
}.getOrElse("")
content += "\n</project>"
writeFile(dir, artifactName(artifact, ".pom"), content.trim)
}
/** Create the jar for the given maven coordinate, using the supplied files. */
private def packJar(
dir: File,
artifact: MavenCoordinate,
files: Seq[(String, File)]): File = {
val jarFile = new File(dir, artifactName(artifact))
val jarFileStream = new FileOutputStream(jarFile)
val jarStream = new JarOutputStream(jarFileStream, new java.util.jar.Manifest())
for (file <- files) {
val jarEntry = new JarEntry(file._1)
jarStream.putNextEntry(jarEntry)
val in = new FileInputStream(file._2)
ByteStreams.copy(in, jarStream)
in.close()
}
jarStream.close()
jarFileStream.close()
jarFile
}
/**
* Creates a jar and pom file, mocking a Maven repository. The root path can be supplied with
* `tempDir`, dependencies can be created into the same repo, and python files can also be packed
* inside the jar.
*
* @param artifact The maven coordinate to generate the jar and pom for.
* @param dependencies List of dependencies this artifact might have to also create jars and poms.
* @param tempDir The root folder of the repository
* @param useIvyLayout whether to mock the Ivy layout for local repository testing
* @param withPython Whether to pack python files inside the jar for extensive testing.
* @return Root path of the repository
*/
private def createLocalRepository(
artifact: MavenCoordinate,
dependencies: Option[Seq[MavenCoordinate]] = None,
tempDir: Option[File] = None,
useIvyLayout: Boolean = false,
withPython: Boolean = false): File = {
// Where the root of the repository exists, and what Ivy will search in
val tempPath = tempDir.getOrElse(Files.createTempDir())
// Create directory if it doesn't exist
Files.createParentDirs(tempPath)
// Where to create temporary class files and such
val root = new File(tempPath, tempPath.hashCode().toString)
Files.createParentDirs(new File(root, "dummy"))
try {
val jarPath = pathFromCoordinate(artifact, tempPath, "jar", useIvyLayout)
Files.createParentDirs(new File(jarPath, "dummy"))
val className = "MyLib"
val javaClass = createJavaClass(root, className, artifact.groupId)
// A tuple of files representation in the jar, and the file
val javaFile = (artifact.groupId.replace(".", "/") + "/" + javaClass.getName, javaClass)
val allFiles =
if (withPython) {
val pythonFile = createPythonFile(root)
Seq(javaFile, (pythonFile.getName, pythonFile))
} else {
Seq(javaFile)
}
val jarFile = packJar(jarPath, artifact, allFiles)
assert(jarFile.exists(), "Problem creating Jar file")
val pomPath = pathFromCoordinate(artifact, tempPath, "pom", useIvyLayout)
Files.createParentDirs(new File(pomPath, "dummy"))
val pomFile = createPom(pomPath, artifact, dependencies)
assert(pomFile.exists(), "Problem creating Pom file")
} finally {
FileUtils.deleteDirectory(root)
}
tempPath
}
/**
* Creates a suite of jars and poms, with or without dependencies, mocking a maven repository.
* @param artifact The main maven coordinate to generate the jar and pom for.
* @param dependencies List of dependencies this artifact might have to also create jars and poms.
* @param rootDir The root folder of the repository (like `~/.m2/repositories`)
* @param useIvyLayout whether to mock the Ivy layout for local repository testing
* @param withPython Whether to pack python files inside the jar for extensive testing.
* @return Root path of the repository. Will be `rootDir` if supplied.
*/
private[deploy] def createLocalRepositoryForTests(
artifact: MavenCoordinate,
dependencies: Option[String],
rootDir: Option[File],
useIvyLayout: Boolean = false,
withPython: Boolean = false): File = {
val deps = dependencies.map(SparkSubmitUtils.extractMavenCoordinates)
val mainRepo = createLocalRepository(artifact, deps, rootDir, useIvyLayout, withPython)
deps.foreach { seq => seq.foreach { dep =>
createLocalRepository(dep, None, Some(mainRepo), useIvyLayout, withPython = false)
}}
mainRepo
}
/**
* Creates a repository for a test, and cleans it up afterwards.
*
* @param artifact The main maven coordinate to generate the jar and pom for.
* @param dependencies List of dependencies this artifact might have to also create jars and poms.
* @param rootDir The root folder of the repository (like `~/.m2/repositories`)
* @param useIvyLayout whether to mock the Ivy layout for local repository testing
* @param withPython Whether to pack python files inside the jar for extensive testing.
* @return Root path of the repository. Will be `rootDir` if supplied.
*/
private[deploy] def withRepository(
artifact: MavenCoordinate,
dependencies: Option[String],
rootDir: Option[File],
useIvyLayout: Boolean = false,
withPython: Boolean = false)(f: String => Unit): Unit = {
val repo = createLocalRepositoryForTests(artifact, dependencies, rootDir, useIvyLayout,
withPython)
try {
f(repo.toURI.toString)
} finally {
// Clean up
if (repo.toString.contains(".m2") || repo.toString.contains(".ivy2")) {
FileUtils.deleteDirectory(new File(repo,
artifact.groupId.replace(".", File.separator) + File.separator + artifact.artifactId))
dependencies.map(SparkSubmitUtils.extractMavenCoordinates).foreach { seq =>
seq.foreach { dep =>
FileUtils.deleteDirectory(new File(repo,
dep.artifactId.replace(".", File.separator)))
}
}
} else {
FileUtils.deleteDirectory(repo)
}
}
}
}
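
For orientation, these are the layouts `pathFromCoordinate` produces for `my.great.lib:mylib:0.1` (paths shown with `/` for brevity):

    // Maven layout (useIvyLayout = false):
    //   <repo>/my/great/lib/mylib/0.1/mylib-0.1.jar
    //   <repo>/my/great/lib/mylib/0.1/mylib-0.1.pom
    // Mock Ivy layout (useIvyLayout = true) adds a per-extension directory:
    //   <repo>/my/great/lib/mylib/0.1/jars/mylib-0.1.jar
    //   <repo>/my/great/lib/mylib/0.1/poms/mylib-0.1.pom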


@@ -30,6 +30,7 @@ import org.scalatest.time.SpanSugar._
import org.apache.spark._
import org.apache.spark.deploy.SparkSubmit._
import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
import org.apache.spark.util.{ResetSystemProperties, Utils}
// Note: this suite mixes in ResetSystemProperties because SparkSubmit.main() sets a bunch
@@ -334,18 +335,22 @@ class SparkSubmitSuite extends FunSuite with Matchers with ResetSystemProperties
runSparkSubmit(args)
}
ignore("includes jars passed in through --packages") {
test("includes jars passed in through --packages") {
val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
val packagesString = "com.databricks:spark-csv_2.10:0.1,com.databricks:spark-avro_2.10:0.1"
val args = Seq(
"--class", JarCreationTest.getClass.getName.stripSuffix("$"),
"--name", "testApp",
"--master", "local-cluster[2,1,512]",
"--packages", packagesString,
"--conf", "spark.ui.enabled=false",
unusedJar.toString,
"com.databricks.spark.csv.DefaultSource", "com.databricks.spark.avro.DefaultSource")
runSparkSubmit(args)
val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
val dep = MavenCoordinate("my.great.dep", "mylib", "0.1")
IvyTestUtils.withRepository(main, Some(dep.toString), None) { repo =>
val args = Seq(
"--class", JarCreationTest.getClass.getName.stripSuffix("$"),
"--name", "testApp",
"--master", "local-cluster[2,1,512]",
"--packages", Seq(main, dep).mkString(","),
"--repositories", repo,
"--conf", "spark.ui.enabled=false",
unusedJar.toString,
"my.great.lib.MyLib", "my.great.dep.MyLib")
runSparkSubmit(args)
}
}
test("resolves command line argument paths correctly") {


@@ -17,17 +17,17 @@
package org.apache.spark.deploy
import java.io.{PrintStream, OutputStream, File}
import org.apache.ivy.core.settings.IvySettings
import java.io.{File, PrintStream, OutputStream}
import scala.collection.mutable.ArrayBuffer
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.apache.ivy.core.module.descriptor.MDArtifact
import org.apache.ivy.core.settings.IvySettings
import org.apache.ivy.plugins.resolver.IBiblioResolver
import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
private val noOpOutputStream = new OutputStream {
@@ -89,7 +89,7 @@ class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
}
test("ivy path works correctly") {
val ivyPath = "dummy/ivy"
val ivyPath = "dummy" + File.separator + "ivy"
val md = SparkSubmitUtils.getModuleDescriptor
val artifacts = for (i <- 0 until 3) yield new MDArtifact(md, s"jar-$i", "jar", "jar")
var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(ivyPath))
@@ -98,17 +98,38 @@ class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
assert(index >= 0)
jPaths = jPaths.substring(index + ivyPath.length)
}
// end to end
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
"com.databricks:spark-csv_2.10:0.1", None, Option(ivyPath), true)
assert(jarPath.indexOf(ivyPath) >= 0, "should use non-default ivy path")
val main = MavenCoordinate("my.awesome.lib", "mylib", "0.1")
IvyTestUtils.withRepository(main, None, None) { repo =>
// end to end
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, Option(repo),
Option(ivyPath), true)
assert(jarPath.indexOf(ivyPath) >= 0, "should use non-default ivy path")
}
}
ignore("search for artifact at other repositories") {
val path = SparkSubmitUtils.resolveMavenCoordinates("com.agimatec:agimatec-validation:0.9.3",
Option("https://oss.sonatype.org/content/repositories/agimatec/"), None, true)
assert(path.indexOf("agimatec-validation") >= 0, "should find package. If it doesn't, check" +
"if package still exists. If it has been removed, replace the example in this test.")
test("search for artifact at local repositories") {
val main = new MavenCoordinate("my.awesome.lib", "mylib", "0.1")
// Local M2 repository
IvyTestUtils.withRepository(main, None, Some(SparkSubmitUtils.m2Path)) { repo =>
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, None, None, true)
assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
}
// Local Ivy Repository
val settings = new IvySettings
val ivyLocal = new File(settings.getDefaultIvyUserDir, "local" + File.separator)
IvyTestUtils.withRepository(main, None, Some(ivyLocal), true) { repo =>
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, None, None, true)
assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
}
// Local ivy repository with modified home
val dummyIvyPath = "dummy" + File.separator + "ivy"
val dummyIvyLocal = new File(dummyIvyPath, "local" + File.separator)
IvyTestUtils.withRepository(main, None, Some(dummyIvyLocal), true) { repo =>
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, None,
Some(dummyIvyPath), true)
assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
assert(jarPath.indexOf(dummyIvyPath) >= 0, "should be in new ivy path")
}
}
test("dependency not found throws RuntimeException") {
@@ -117,7 +138,7 @@ class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
}
}
ignore("neglects Spark and Spark's dependencies") {
test("neglects Spark and Spark's dependencies") {
val components = Seq("bagel_", "catalyst_", "core_", "graphx_", "hive_", "mllib_", "repl_",
"sql_", "streaming_", "yarn_", "network-common_", "network-shuffle_", "network-yarn_")
@@ -127,11 +148,11 @@ class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
val path = SparkSubmitUtils.resolveMavenCoordinates(coordinates, None, None, true)
assert(path === "", "should return empty path")
// Should not exclude the following dependency. Will throw an error, because it doesn't exist,
// but the fact that it is checking means that it wasn't excluded.
intercept[RuntimeException] {
SparkSubmitUtils.resolveMavenCoordinates(coordinates +
",org.apache.spark:spark-streaming-kafka-assembly_2.10:1.2.0", None, None, true)
val main = MavenCoordinate("org.apache.spark", "spark-streaming-kafka-assembly_2.10", "1.2.0")
IvyTestUtils.withRepository(main, None, None) { repo =>
val files = SparkSubmitUtils.resolveMavenCoordinates(coordinates + "," + main.toString,
Some(repo), None, true)
assert(files.indexOf(main.artifactId) >= 0, "Did not return artifact")
}
}
}
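
For reference, the coordinate string that the re-enabled exclusion test assembles looks roughly like this (a sketch consistent with the component list above; the exact builder is outside this hunk):

    val components = Seq("bagel_", "core_", "sql_") // subset, for brevity
    val coordinates = components
      .map(comp => s"org.apache.spark:spark-${comp}2.10:1.2.0")
      .mkString(",")
    // => "org.apache.spark:spark-bagel_2.10:1.2.0,org.apache.spark:spark-core_2.10:1.2.0,..."
    // resolveMavenCoordinates(coordinates, None, None, true) returns "" because Spark
    // components are excluded, while spark-streaming-kafka-assembly_2.10 is not a
    // Spark component and is therefore still resolved from the mock repository.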