cdea1d1343
## What changes were proposed in this pull request? Docker tests are using an older version of the jersey jars (1.19), which was used in older releases of Spark. In the 2.0 releases Spark was upgraded to use the 2.x version of Jersey. After the upgrade to the new versions, docker tests are failing with AbstractMethodError. Now that Spark is upgraded to the 2.x jersey version, use of the shaded docker jars may not be required any more. Removed the exclusions/overrides of jersey related classes from the pom file, and changed the docker-client to use the regular jar instead of the shaded one. ## How was this patch tested? Tested using existing docker-integration-tests Author: sureshthalamati <suresh.thalamati@gmail.com> Closes #15114 from sureshthalamati/docker_testfix-spark-17473.
148 lines
5.2 KiB
XML
<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one or more
  ~ contributor license agreements.  See the NOTICE file distributed with
  ~ this work for additional information regarding copyright ownership.
  ~ The ASF licenses this file to You under the Apache License, Version 2.0
  ~ (the "License"); you may not use this file except in compliance with
  ~ the License.  You may obtain a copy of the License at
  ~
  ~    http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
  -->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-parent_2.11</artifactId>
    <version>2.1.0-SNAPSHOT</version>
    <relativePath>../../pom.xml</relativePath>
  </parent>

  <artifactId>spark-docker-integration-tests_2.11</artifactId>
  <packaging>jar</packaging>
  <name>Spark Project Docker Integration Tests</name>
  <url>http://spark.apache.org/</url>
  <properties>
    <sbt.project.name>docker-integration-tests</sbt.project.name>
  </properties>

  <repositories>
    <repository>
      <!-- Extra repository used to resolve JDBC driver test dependencies
           (e.g. the Oracle ojdbc6 jar below) that are not published to
           Maven Central. -->
      <id>db</id>
      <url>https://app.camunda.com/nexus/content/repositories/public/</url>
      <releases>
        <enabled>true</enabled>
        <checksumPolicy>warn</checksumPolicy>
      </releases>
    </repository>
  </repositories>

  <dependencies>
    <dependency>
      <groupId>com.spotify</groupId>
      <artifactId>docker-client</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpclient</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpcore</artifactId>
      <scope>test</scope>
    </dependency>
    <!-- Necessary in order to avoid errors in log messages: -->
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
      <version>18.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-tags_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.postgresql</groupId>
      <artifactId>postgresql</artifactId>
      <scope>test</scope>
    </dependency>
    <!-- Oracle ojdbc jar, used by the Oracle integration suite for docker testing.
         See https://github.com/apache/spark/pull/11306 for background on why we need
         to use an ojdbc jar for the testcase. The ojdbc jar is not available in
         Maven Central; it is resolved from the extra "db" repository declared
         above. -->
    <dependency>
      <groupId>com.oracle</groupId>
      <artifactId>ojdbc6</artifactId>
      <version>11.2.0.1.0</version>
      <scope>test</scope>
    </dependency>

    <!-- DB2 JCC driver manual installation instructions

         You can build this datasource if you:
         1) have the DB2 artifacts installed in a local repo and supply the URL:
            -Dmaven.repo.drivers=http://my.local.repo

         2) have a copy of the DB2 JCC driver and run the following commands:
            mvn install:install-file -Dfile=${path to db2jcc4.jar} \
                -DgroupId=com.ibm.db2 \
                -DartifactId=db2jcc4 \
                -Dversion=10.5 \
                -Dpackaging=jar

         Note: IBM DB2 JCC driver is available for download at
         http://www-01.ibm.com/support/docview.wss?uid=swg21363866
    -->
    <dependency>
      <groupId>com.ibm.db2.jcc</groupId>
      <artifactId>db2jcc4</artifactId>
      <version>10.5.0.5</version>
      <type>jar</type>
      <!-- test scope added for consistency with the other JDBC driver
           dependencies in this test-only module. -->
      <scope>test</scope>
    </dependency>
  </dependencies>
</project>
|