You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by mi...@apache.org on 2017/03/07 05:05:38 UTC
zeppelin git commit: [ZEPPELIN-2139] Interpreters based on scala_2.11
aren't installed correctly
Repository: zeppelin
Updated Branches:
refs/heads/master 77778e35e -> e6b32c0f2
[ZEPPELIN-2139] Interpreters based on scala_2.11 aren't installed correctly
### What is this PR for?
pom variables such as `${scala.version}` are not replaced with their values when you do `mvn install`.
This makes leaf poms look for this value in the parent pom, which is always `2.10.5` for `${scala.version}`, and it causes a scala library dependency conflict. For example, zeppelin-flink_2.11 will have scala 2.10.5 as a dependency.
This PR fixes this problem by using maven flatten plugin.
### What type of PR is it?
Bug Fix
### What is the Jira issue?
[ZEPPELIN-2139](https://issues.apache.org/jira/browse/ZEPPELIN-2139)
### How should this be tested?
```
$ ./dev/change_scala_version.sh 2.11
$ mvn clean install -pl 'zeppelin-server,zeppelin-zengine,zeppelin-interpreter,flink' -am -DskipRat -DskipTests -Pscala-2.11
```
Open the `~/.m2/repository/org/apache/zeppelin/zeppelin-flink_2.11/0.8.0-SNAPSHOT/zeppelin-flink_2.11-0.8.0-SNAPSHOT.pom` file and see if the scala-related libraries' dependency versions are set to 2.11.7
### Screenshots (if appropriate)
### Questions:
* Do the license files need updating? no
* Are there breaking changes for older versions? no
* Does this need documentation? no
Author: Mina Lee <mi...@apache.org>
Closes #2059 from minahlee/ZEPPELIN-2139 and squashes the following commits:
62d852a [Mina Lee] Change <scala.version> property in parent pom file
489c843 [Mina Lee] Use maven flatten plugin to make pom.xml variables to be replaced by value
783c014 [Mina Lee] Fix indentation and add default properties to be used in flattened-pom
Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/e6b32c0f
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/e6b32c0f
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/e6b32c0f
Branch: refs/heads/master
Commit: e6b32c0f244b7c68e243f9076291c341ee2a1eb5
Parents: 77778e3
Author: Mina Lee <mi...@apache.org>
Authored: Thu Feb 23 18:21:35 2017 +0900
Committer: Mina Lee <mi...@apache.org>
Committed: Tue Mar 7 14:05:31 2017 +0900
----------------------------------------------------------------------
.gitignore | 3 +
dev/change_scala_version.sh | 24 +-
livy/pom.xml | 1103 +++++++++++++++++++-------------------
pom.xml | 28 +-
4 files changed, 599 insertions(+), 559 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e6b32c0f/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 8db4291..bd9fc78 100644
--- a/.gitignore
+++ b/.gitignore
@@ -100,6 +100,9 @@ Thumbs.db
target/
**/target/
+# maven flattened pom files
+**/.flattened-pom.xml
+
# Generated by Jekyll
docs/_site/
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e6b32c0f/dev/change_scala_version.sh
----------------------------------------------------------------------
diff --git a/dev/change_scala_version.sh b/dev/change_scala_version.sh
index cb2c842..0ccfe7e 100755
--- a/dev/change_scala_version.sh
+++ b/dev/change_scala_version.sh
@@ -34,7 +34,7 @@ if [[ ($# -ne 1) || ( $1 == "--help") || $1 == "-h" ]]; then
usage
fi
-TO_VERSION=$1
+TO_VERSION="$1"
check_scala_version() {
for i in ${VALID_VERSIONS[*]}; do [ $i = "$1" ] && return 0; done
@@ -42,12 +42,14 @@ check_scala_version() {
exit 1
}
-check_scala_version "$TO_VERSION"
+check_scala_version "${TO_VERSION}"
-if [ $TO_VERSION = "2.11" ]; then
+if [ "${TO_VERSION}" = "2.11" ]; then
FROM_VERSION="2.10"
+ SCALA_LIB_VERSION="2.11.7"
else
FROM_VERSION="2.11"
+ SCALA_LIB_VERSION="2.10.5"
fi
sed_i() {
@@ -57,11 +59,17 @@ sed_i() {
export -f sed_i
BASEDIR=$(dirname $0)/..
-find "$BASEDIR" -name 'pom.xml' -not -path '*target*' -print \
- -exec bash -c "sed_i 's/\(artifactId.*\)_'$FROM_VERSION'/\1_'$TO_VERSION'/g' {}" \;
+find "${BASEDIR}" -name 'pom.xml' -not -path '*target*' -print \
+ -exec bash -c "sed_i 's/\(artifactId.*\)_'${FROM_VERSION}'/\1_'${TO_VERSION}'/g' {}" \;
-# Also update <scala.binary.version> in parent POM
+# update <scala.binary.version> in parent POM
# Match any scala binary version to ensure idempotency
-sed_i '1,/<scala\.binary\.version>[0-9]*\.[0-9]*</s/<scala\.binary\.version>[0-9]*\.[0-9]*</<scala.binary.version>'$TO_VERSION'</' \
- "$BASEDIR/pom.xml"
+sed_i '1,/<scala\.binary\.version>[0-9]*\.[0-9]*</s/<scala\.binary\.version>[0-9]*\.[0-9]*</<scala.binary.version>'${TO_VERSION}'</' \
+ "${BASEDIR}/pom.xml"
+# update <scala.version> in parent POM
+# This is to make variables in leaf pom to be substituted to real value when flattened-pom is created.
+# maven-flatten plugin doesn't take properties defined under profile even if scala-2.11/scala-2.10 is activated via -Pscala-2.11/-Pscala-2.10,
+# and use default defined properties to create flatten pom.
+sed_i '1,/<scala\.version>[0-9]*\.[0-9]*\.[0-9]*</s/<scala\.version>[0-9]*\.[0-9]*\.[0-9]*</<scala.version>'${SCALA_LIB_VERSION}'</' \
+ "${BASEDIR}/pom.xml"
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e6b32c0f/livy/pom.xml
----------------------------------------------------------------------
diff --git a/livy/pom.xml b/livy/pom.xml
index 66ababe..3c121fc 100644
--- a/livy/pom.xml
+++ b/livy/pom.xml
@@ -16,562 +16,565 @@
~ limitations under the License.
-->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <artifactId>zeppelin</artifactId>
- <groupId>org.apache.zeppelin</groupId>
- <version>0.8.0-SNAPSHOT</version>
- <relativePath>..</relativePath>
- </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>zeppelin</artifactId>
<groupId>org.apache.zeppelin</groupId>
- <artifactId>zeppelin-livy</artifactId>
- <packaging>jar</packaging>
<version>0.8.0-SNAPSHOT</version>
- <name>Zeppelin: Livy interpreter</name>
-
- <properties>
- <!--library versions-->
- <commons.exec.version>1.3</commons.exec.version>
- <httpcomponents.client.version>4.3.4</httpcomponents.client.version>
- <spring.web.version>4.3.0.RELEASE</spring.web.version>
- <spring.security.kerberosclient>1.0.1.RELEASE</spring.security.kerberosclient>
-
- <!--test library versions-->
- <achilles.version>3.2.4-Zeppelin</achilles.version>
- <assertj.version>1.7.0</assertj.version>
- <mockito.version>1.9.5</mockito.version>
- <!--plugin versions-->
- <plugin.failsafe.version>2.16</plugin.failsafe.version>
- <plugin.antrun.version>1.8</plugin.antrun.version>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>zeppelin-interpreter</artifactId>
- <version>${project.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-exec</artifactId>
- <version>${commons.exec.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
- <version>${httpcomponents.client.version}</version>
- </dependency>
-
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.springframework.security.kerberos</groupId>
- <artifactId>spring-security-kerberos-client</artifactId>
- <version>${spring.security.kerberosclient}</version>
- </dependency>
-
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-web</artifactId>
- <version>${spring.web.version}</version>
- </dependency>
-
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.assertj</groupId>
- <artifactId>assertj-core</artifactId>
- <version>${assertj.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-all</artifactId>
- <version>${mockito.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>com.cloudera.livy</groupId>
- <artifactId>livy-integration-test</artifactId>
- <version>${livy.version}</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>org.xerial.snappy</groupId>
- <artifactId>snappy-java</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-core_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-sql_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-streaming_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-hive_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-repl_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-yarn_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-auth</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-client</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-client</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-server-tests</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>com.cloudera.livy</groupId>
- <artifactId>livy-test-lib</artifactId>
- <version>${livy.version}</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>org.xerial.snappy</groupId>
- <artifactId>snappy-java</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-core_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-sql_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-streaming_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-hive_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-repl_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-yarn_${scala.binary.version}</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
+ <relativePath>..</relativePath>
+ </parent>
+
+ <groupId>org.apache.zeppelin</groupId>
+ <artifactId>zeppelin-livy</artifactId>
+ <packaging>jar</packaging>
+ <version>0.8.0-SNAPSHOT</version>
+ <name>Zeppelin: Livy interpreter</name>
+
+ <properties>
+ <!--library versions-->
+ <commons.exec.version>1.3</commons.exec.version>
+ <httpcomponents.client.version>4.3.4</httpcomponents.client.version>
+ <spring.web.version>4.3.0.RELEASE</spring.web.version>
+ <spring.security.kerberosclient>1.0.1.RELEASE</spring.security.kerberosclient>
+
+ <!--test library versions-->
+ <achilles.version>3.2.4-Zeppelin</achilles.version>
+ <assertj.version>1.7.0</assertj.version>
+ <mockito.version>1.9.5</mockito.version>
+ <livy.version>0.3.0</livy.version>
+ <spark.version>2.1.0</spark.version>
+ <hadoop.version>2.6.0</hadoop.version>
+ <!--plugin versions-->
+ <plugin.failsafe.version>2.16</plugin.failsafe.version>
+ <plugin.antrun.version>1.8</plugin.antrun.version>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>zeppelin-interpreter</artifactId>
+ <version>${project.version}</version>
+ <scope>provided</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-exec</artifactId>
+ <version>${commons.exec.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ <version>${httpcomponents.client.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.google.code.gson</groupId>
+ <artifactId>gson</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.springframework.security.kerberos</groupId>
+ <artifactId>spring-security-kerberos-client</artifactId>
+ <version>${spring.security.kerberosclient}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-web</artifactId>
+ <version>${spring.web.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <version>${assertj.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>${mockito.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.cloudera.livy</groupId>
+ <artifactId>livy-integration-test</artifactId>
+ <version>${livy.version}</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.xerial.snappy</groupId>
+ <artifactId>snappy-java</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-core_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-sql_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-streaming_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-hive_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-repl_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-yarn_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-auth</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-client</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-tests</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>com.cloudera.livy</groupId>
+ <artifactId>livy-test-lib</artifactId>
+ <version>${livy.version}</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.xerial.snappy</groupId>
+ <artifactId>snappy-java</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-core_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-sql_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-streaming_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-hive_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-repl_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-yarn_${scala.binary.version}</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-sql_${scala.binary.version}</artifactId>
+ <version>${spark.version}</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>com.esotericsoftware</groupId>
+ <artifactId>kryo-shaded</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-streaming_${scala.binary.version}</artifactId>
+ <version>${spark.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-hive_${scala.binary.version}</artifactId>
+ <version>${spark.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-repl_${scala.binary.version}</artifactId>
+ <version>${spark.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-yarn_${scala.binary.version}</artifactId>
+ <version>${spark.version}</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-auth</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <classifier>tests</classifier>
+ <version>${hadoop.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>io.netty</groupId>
+ <artifactId>netty</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <classifier>tests</classifier>
+ <version>${hadoop.version}</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>io.netty</groupId>
+ <artifactId>netty</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-client</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-tests</artifactId>
+ <classifier>tests</classifier>
+ <version>${hadoop.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <repositories>
+ <repository>
+ <id>ossrh</id>
+ <name>ossrh repository</name>
+ <url>https://oss.sonatype.org/content/repositories/releases/</url>
+ <releases>
+ <enabled>true</enabled>
+ </releases>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ </repository>
+ </repositories>
+
+ <build>
+ <plugins>
+ <plugin>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>enforce</id>
+ <phase>none</phase>
+ </execution>
+ </executions>
+ </plugin>
+
+ <plugin>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy-dependencies</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/../../interpreter/livy
+ </outputDirectory>
+ <overWriteReleases>false</overWriteReleases>
+ <overWriteSnapshots>false</overWriteSnapshots>
+ <overWriteIfNewer>true</overWriteIfNewer>
+ <includeScope>runtime</includeScope>
+ </configuration>
+ </execution>
+ <execution>
+ <id>copy-artifact</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/../../interpreter/livy
+ </outputDirectory>
+ <overWriteReleases>false</overWriteReleases>
+ <overWriteSnapshots>false</overWriteSnapshots>
+ <overWriteIfNewer>true</overWriteIfNewer>
+ <includeScope>runtime</includeScope>
+ <artifactItems>
+ <artifactItem>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>${project.artifactId}</artifactId>
+ <version>${project.version}</version>
+ <type>${project.packaging}</type>
+ </artifactItem>
+ </artifactItems>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+
+ <plugin>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ <version>${plugin.failsafe.version}</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>integration-test</goal>
+ <goal>verify</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <systemPropertyVariables>
+ <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
+ </systemPropertyVariables>
+ <environmentVariables>
+ <LIVY_SPARK_SCALA_VERSION>${scala.binary.version}</LIVY_SPARK_SCALA_VERSION>
+ <LIVY_LOG_DIR>${project.build.directory}/tmp</LIVY_LOG_DIR>
+ </environmentVariables>
+ <argLine>-Xmx2048m</argLine>
+ </configuration>
+ </plugin>
+
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <version>${plugin.antrun.version}</version>
+ <executions>
+ <!-- Cleans up files that tests append to (because we have two test plugins). -->
+ <execution>
+ <id>pre-test-clean</id>
+ <phase>generate-test-resources</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <delete file="${project.build.directory}/unit-tests.log"
+ quiet="true"/>
+ <delete file="${project.build.directory}/jacoco.exec" quiet="true"/>
+ <delete dir="${project.build.directory}/tmp" quiet="true"/>
+ </target>
+ </configuration>
+ </execution>
+ <!-- Create the temp directory to be used by tests. -->
+ <execution>
+ <id>create-tmp-dir</id>
+ <phase>generate-test-resources</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <mkdir dir="${project.build.directory}/tmp"/>
+ </target>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+ <profiles>
+ <profile>
+ <id>livy-0.3</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <properties>
+ <livy.version>0.3.0</livy.version>
+ <spark.version>2.1.0</spark.version>
+ <hadoop.version>2.6.0</hadoop.version>
+ </properties>
+ <dependencies>
<dependency>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-sql_${scala.binary.version}</artifactId>
- <version>${spark.version}</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>com.esotericsoftware</groupId>
- <artifactId>kryo-shaded</artifactId>
- </exclusion>
- </exclusions>
+ <groupId>com.cloudera.livy</groupId>
+ <artifactId>livy-core_${scala.binary.version}</artifactId>
+ <version>0.3.0</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.xerial.snappy</groupId>
+ <artifactId>snappy-java</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-core_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-sql_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-streaming_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-hive_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-repl_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-yarn_${scala.binary.version}</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
-
- <dependency>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-streaming_${scala.binary.version}</artifactId>
- <version>${spark.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-hive_${scala.binary.version}</artifactId>
- <version>${spark.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-repl_${scala.binary.version}</artifactId>
- <version>${spark.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-yarn_${scala.binary.version}</artifactId>
- <version>${spark.version}</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-common</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-server-web-proxy</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-auth</artifactId>
- <version>${hadoop.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <version>${hadoop.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <classifier>tests</classifier>
- <version>${hadoop.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <version>${hadoop.version}</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>io.netty</groupId>
- <artifactId>netty</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <classifier>tests</classifier>
- <version>${hadoop.version}</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>io.netty</groupId>
- <artifactId>netty</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-client</artifactId>
- <version>${hadoop.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-client</artifactId>
- <version>${hadoop.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-api</artifactId>
- <version>${hadoop.version}</version>
- <scope>test</scope>
- </dependency>
-
+ </dependencies>
+ </profile>
+
+ <profile>
+ <id>livy-0.2</id>
+ <properties>
+ <livy.version>0.2.0</livy.version>
+ <spark.version>1.6.2</spark.version>
+ <hadoop.version>2.6.0</hadoop.version>
+ <scala.binary.version>2.10</scala.binary.version>
+ </properties>
+ <dependencies>
<dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-server-tests</artifactId>
- <classifier>tests</classifier>
- <version>${hadoop.version}</version>
- <scope>test</scope>
+ <groupId>com.cloudera.livy</groupId>
+ <artifactId>livy-core</artifactId>
+ <version>0.2.0</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.xerial.snappy</groupId>
+ <artifactId>snappy-java</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-core_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-sql_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-streaming_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-hive_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-repl_${scala.binary.version}</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-yarn_${scala.binary.version}</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
- </dependencies>
-
- <repositories>
- <repository>
- <id>ossrh</id>
- <name>ossrh repository</name>
- <url>https://oss.sonatype.org/content/repositories/releases/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- </repositories>
-
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-enforcer-plugin</artifactId>
- <executions>
- <execution>
- <id>enforce</id>
- <phase>none</phase>
- </execution>
- </executions>
- </plugin>
-
- <plugin>
- <artifactId>maven-dependency-plugin</artifactId>
- <executions>
- <execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- <configuration>
- <outputDirectory>${project.build.directory}/../../interpreter/livy
- </outputDirectory>
- <overWriteReleases>false</overWriteReleases>
- <overWriteSnapshots>false</overWriteSnapshots>
- <overWriteIfNewer>true</overWriteIfNewer>
- <includeScope>runtime</includeScope>
- </configuration>
- </execution>
- <execution>
- <id>copy-artifact</id>
- <phase>package</phase>
- <goals>
- <goal>copy</goal>
- </goals>
- <configuration>
- <outputDirectory>${project.build.directory}/../../interpreter/livy
- </outputDirectory>
- <overWriteReleases>false</overWriteReleases>
- <overWriteSnapshots>false</overWriteSnapshots>
- <overWriteIfNewer>true</overWriteIfNewer>
- <includeScope>runtime</includeScope>
- <artifactItems>
- <artifactItem>
- <groupId>${project.groupId}</groupId>
- <artifactId>${project.artifactId}</artifactId>
- <version>${project.version}</version>
- <type>${project.packaging}</type>
- </artifactItem>
- </artifactItems>
- </configuration>
- </execution>
- </executions>
- </plugin>
-
- <plugin>
- <artifactId>maven-failsafe-plugin</artifactId>
- <version>${plugin.failsafe.version}</version>
- <executions>
- <execution>
- <goals>
- <goal>integration-test</goal>
- <goal>verify</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <systemPropertyVariables>
- <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
- </systemPropertyVariables>
- <environmentVariables>
- <LIVY_SPARK_SCALA_VERSION>${scala.binary.version}</LIVY_SPARK_SCALA_VERSION>
- <LIVY_LOG_DIR>${project.build.directory}/tmp</LIVY_LOG_DIR>
- </environmentVariables>
- <argLine>-Xmx2048m</argLine>
- </configuration>
- </plugin>
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <version>${plugin.antrun.version}</version>
- <executions>
- <!-- Cleans up files that tests append to (because we have two test plugins). -->
- <execution>
- <id>pre-test-clean</id>
- <phase>generate-test-resources</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <target>
- <delete file="${project.build.directory}/unit-tests.log"
- quiet="true"/>
- <delete file="${project.build.directory}/jacoco.exec" quiet="true"/>
- <delete dir="${project.build.directory}/tmp" quiet="true"/>
- </target>
- </configuration>
- </execution>
- <!-- Create the temp directory to be used by tests. -->
- <execution>
- <id>create-tmp-dir</id>
- <phase>generate-test-resources</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <target>
- <mkdir dir="${project.build.directory}/tmp"/>
- </target>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-
- <profiles>
- <profile>
- <id>livy-0.3</id>
- <activation>
- <activeByDefault>true</activeByDefault>
- </activation>
- <properties>
- <livy.version>0.3.0</livy.version>
- <spark.version>2.1.0</spark.version>
- <hadoop.version>2.6.0</hadoop.version>
- </properties>
- <dependencies>
- <dependency>
- <groupId>com.cloudera.livy</groupId>
- <artifactId>livy-core_${scala.binary.version}</artifactId>
- <version>0.3.0</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>org.xerial.snappy</groupId>
- <artifactId>snappy-java</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-core_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-sql_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-streaming_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-hive_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-repl_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-yarn_${scala.binary.version}</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- </dependencies>
- </profile>
-
- <profile>
- <id>livy-0.2</id>
- <properties>
- <livy.version>0.2.0</livy.version>
- <spark.version>1.6.2</spark.version>
- <hadoop.version>2.6.0</hadoop.version>
- <scala.binary.version>2.10</scala.binary.version>
- </properties>
- <dependencies>
- <dependency>
- <groupId>com.cloudera.livy</groupId>
- <artifactId>livy-core</artifactId>
- <version>0.2.0</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>org.xerial.snappy</groupId>
- <artifactId>snappy-java</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-core_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-sql_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-streaming_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-hive_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-repl_${scala.binary.version}</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-yarn_${scala.binary.version}</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- </dependencies>
- </profile>
- </profiles>
+ </dependencies>
+ </profile>
+ </profiles>
</project>
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e6b32c0f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index aef1e82..af2bdf7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -308,6 +308,32 @@
</configuration>
</plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>flatten-maven-plugin</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <flattenMode>ossrh</flattenMode>
+ <updatePomFile>true</updatePomFile>
+ </configuration>
+ <executions>
+ <execution>
+ <id>flatten</id>
+ <phase>process-resources</phase>
+ <goals>
+ <goal>flatten</goal>
+ </goals>
+ </execution>
+ <execution>
+ <id>flatten.clean</id>
+ <phase>clean</phase>
+ <goals>
+ <goal>clean</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+
<!-- Test coverage plugin -->
<plugin>
<groupId>org.codehaus.mojo</groupId>
@@ -475,7 +501,7 @@
<version>${plugin.deploy.version}</version>
</plugin>
- <!--TODO(alex): make part of the build and reconcile conflicts
+ <!--TODO(alex): make part of the build and reconcile conflicts
<plugin>
<groupId>com.ning.maven.plugins</groupId>
<artifactId>maven-duplicate-finder-plugin</artifactId>