Posted to commits@bigtop.apache.org by iw...@apache.org on 2022/01/19 12:36:55 UTC

[bigtop] branch master updated: BIGTOP-3625. Fix Livy's build failure (#854)

This is an automated email from the ASF dual-hosted git repository.

iwasakims pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git


The following commit(s) were added to refs/heads/master by this push:
     new 4ab759f  BIGTOP-3625. Fix Livy's build failure (#854)
4ab759f is described below

commit 4ab759fd227871d391550e9239a7c4d11ef9e266
Author: Masatake Iwasaki <iw...@apache.org>
AuthorDate: Wed Jan 19 21:35:36 2022 +0900

    BIGTOP-3625. Fix Livy's build failure (#854)
---
 bigtop-packages/src/common/livy/do-component-build |   10 +-
 ...h1-LIVY-756.diff => patch1-LIVY-756-0.7.1.diff} | 1920 ++++++++------------
 bigtop-packages/src/rpm/livy/SPECS/livy.spec       |    2 +-
 bigtop.bom                                         |    2 +-
 4 files changed, 806 insertions(+), 1128 deletions(-)
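
The core of the fix is in do-component-build: Livy 0.7.1's POMs key the Scala
and Spark versions by Scala binary version (scala-2.12.version,
spark.scala-2.12.version) rather than the old scala.version/spark.version
pair, and the assembly zip no longer carries a Scala binary suffix. As a
hedged illustration only — the Spark, Zookeeper and Hadoop values below are
placeholders, not necessarily what bigtop.bom pins — the patched build step
expands to roughly:

    mvn install -Drat.skip=true -DskipTests -Dskip=true -DskipITs=true \
        -Dscala-2.12.version=2.12.15 \
        -Dspark.scala-2.12.version=3.0.1 \
        -Dzookeeper.version=3.4.14 \
        -Dhadoop.version=2.10.1 \
        -Pspark-3.0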

diff --git a/bigtop-packages/src/common/livy/do-component-build b/bigtop-packages/src/common/livy/do-component-build
index d4d920e..5ee67b7 100644
--- a/bigtop-packages/src/common/livy/do-component-build
+++ b/bigtop-packages/src/common/livy/do-component-build
@@ -22,13 +22,13 @@ LIVY_BUILD_OPTS="-Drat.skip=true \
                  -DskipTests \
                  -Dskip=true \
                  -DskipITs=true \
-                 -Dscala.version=${SCALA_VERSION} \
-                 -Dscala.binary.version=${SCALA_VERSION%.*} \
+                 -Dscala-${SCALA_VERSION%.*}.version=${SCALA_VERSION} \
+                 -Dscala-2.12.version=2.12.15 \
                  -Dzookeeper.version=${ZOOKEEPER_VERSION} \
                  -Dhadoop.version=${HADOOP_VERSION} \
-                 -Dspark.version=${SPARK_VERSION} \
-                 -Pspark-${SPARK_VERSION%.*}"
+                 -Dspark.scala-${SCALA_VERSION%.*}.version=${SPARK_VERSION} \
+                 -Pspark-3.0"
 
 mvn install $LIVY_BUILD_OPTS "$@"
 
-unzip assembly/target/apache-livy-$LIVY_VERSION-incubating-bin_${SCALA_VERSION%.*}.zip && mv apache-livy-$LIVY_VERSION-incubating-bin_${SCALA_VERSION%.*} build
+unzip assembly/target/apache-livy-$LIVY_VERSION-incubating-bin.zip && mv apache-livy-$LIVY_VERSION-incubating-bin build
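
For reference, a minimal sketch of the POSIX parameter expansion the new
options rely on, assuming SCALA_VERSION=2.12.15 (the value hardcoded for
-Dscala-2.12.version above):

    SCALA_VERSION=2.12.15
    echo "${SCALA_VERSION%.*}"    # %.* strips the shortest trailing .suffix, printing 2.12
    # hence -Dscala-${SCALA_VERSION%.*}.version=${SCALA_VERSION}
    # expands to -Dscala-2.12.version=2.12.15
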
diff --git a/bigtop-packages/src/common/livy/patch1-LIVY-756.diff b/bigtop-packages/src/common/livy/patch1-LIVY-756-0.7.1.diff
similarity index 66%
rename from bigtop-packages/src/common/livy/patch1-LIVY-756.diff
rename to bigtop-packages/src/common/livy/patch1-LIVY-756-0.7.1.diff
index 9f5a919..c214b4a 100644
--- a/bigtop-packages/src/common/livy/patch1-LIVY-756.diff
+++ b/bigtop-packages/src/common/livy/patch1-LIVY-756-0.7.1.diff
@@ -1,148 +1,115 @@
-diff --git a/.rat-excludes b/.rat-excludes
-index ac29fe6..1df6e9e 100644
---- a/.rat-excludes
-+++ b/.rat-excludes
-@@ -28,3 +28,4 @@ logs/*
- **/jquery-2.1.1.min.js
- docs/**/*.html
- docs/**/JB/**
-+venv/*
-diff --git a/.travis.yml b/.travis.yml
-index c2c0ffd..f39ecf8 100644
---- a/.travis.yml
-+++ b/.travis.yml
-@@ -34,6 +34,12 @@ matrix:
-     env: MVN_FLAG='-Pspark-2.4 -Pthriftserver -DskipITs'
-   - name: "Spark 2.4 ITs"
-     env: MVN_FLAG='-Pspark-2.4 -Pthriftserver -DskipTests'
-+  - name: "Spark 3.0 Unit Tests"
-+    env: MVN_FLAG='-Pthriftserver -Pspark-3.0 -DskipITs'
-+  - name: "Spark 3.0 ITs"
-+    env: 
-+      - MVN_FLAG='-Pthriftserver -Pspark-3.0 -DskipTests'
-+      - PYSPARK_ROW_FIELD_SORTING_ENABLED=true
- 
- jdk:
-   - oraclejdk8
+commit 840078c09b77f0d1d676d3f7c042ce154b0575a4
+Author: jerryshao <je...@tencent.com>
+Date:   Thu Jul 2 15:44:12 2020 +0800
+
+    [LIVY-756] Add Spark 3.0 and Scala 2.12 support
+    
+    This PR is based on tprelle's PR #289, and addresses all the remaining issues in that PR:
+    
+    1. Multi-Scala-version support in one build (Scala 2.11 and 2.12).
+    2. Make SparkR work.
+    
+    Also reverts most of the unnecessary changes. Besides, this PR removes the build profiles for Spark below 2.4 (2.2, 2.3), since Spark 2.2 and 2.3 ship only with Scala 2.11 and maintaining multiple versions is hard. Users can still run against Spark 2.2 and 2.3 without changes.
+    
+    All credits to tprelle.
+    
+    Run UT and IT with Spark 2.4.5 and 3.0.0 locally.
+    
+    Author: jerryshao <je...@tencent.com>
+    
+    Closes #300 from jerryshao/LIVY-756.
+    
+    (cherry picked from commit 97cf2f75929ef6c152afc468adbead269bd0758f)
+    
+     Conflicts:
+            integration-test/pom.xml
+            integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala
+            integration-test/src/test/spark2/scala/Spark2JobApiIT.scala
+            pom.xml
+
+diff --git a/README.md b/README.md
+index d5219e5..d454cbc 100644
+--- a/README.md
++++ b/README.md
+@@ -57,7 +57,7 @@ Required python packages for building Livy:
+ To run Livy, you will also need a Spark installation. You can get Spark releases at
+ https://spark.apache.org/downloads.html.
+ 
+-Livy requires Spark 2.2+. You can switch to a different version of Spark by setting the
++Livy requires Spark 2.4+. You can switch to a different version of Spark by setting the
+ ``SPARK_HOME`` environment variable in the Livy server process, without needing to rebuild Livy.
+ 
+ 
+@@ -71,7 +71,7 @@ cd incubator-livy
+ mvn package
+ ```
+ 
+-By default Livy is built against Apache Spark 2.2.0, but the version of Spark used when running
++By default Livy is built against Apache Spark 2.4.5, but the version of Spark used when running
+ Livy does not need to match the version used to build Livy. Livy internally handles the differences
+ between different Spark versions.
+ 
 diff --git a/assembly/assembly.xml b/assembly/assembly.xml
-index de61aee..d75371c 100644
+index de61aee..eaefbb5 100644
 --- a/assembly/assembly.xml
 +++ b/assembly/assembly.xml
-@@ -56,8 +56,8 @@
+@@ -62,6 +62,13 @@
+         <include>*</include>
        </includes>
      </fileSet>
++    <fileSet>
++      <directory>${project.parent.basedir}/repl/scala-2.12/target/jars</directory>
++      <outputDirectory>${assembly.name}/repl_2.12-jars</outputDirectory>
++      <includes>
++        <include>*</include>
++      </includes>
++    </fileSet>
      <fileSet>
--      <directory>${project.parent.basedir}/repl/scala-2.11/target/jars</directory>
--      <outputDirectory>${assembly.name}/repl_2.11-jars</outputDirectory>
-+      <directory>${project.parent.basedir}/repl/scala-${scala.binary.version}/target/jars</directory>
-+      <outputDirectory>${assembly.name}/repl_${scala.binary.version}-jars</outputDirectory>
-       <includes>
-         <include>*</include>
-       </includes>
+       <directory>${project.parent.basedir}/server/target/jars</directory>
+       <outputDirectory>${assembly.name}/jars</outputDirectory>
 diff --git a/assembly/pom.xml b/assembly/pom.xml
-index 41cca2b..d1ac16f 100644
+index 36bb48c..6928354 100644
 --- a/assembly/pom.xml
 +++ b/assembly/pom.xml
-@@ -29,7 +29,7 @@
-   <packaging>pom</packaging>
- 
-   <properties>
--    <assembly.name>apache-livy-${project.version}-bin</assembly.name>
-+    <assembly.name>apache-livy-${project.version}-bin_${scala.binary.version}</assembly.name>
-     <assembly.format>zip</assembly.format>
-     <skipDeploy>true</skipDeploy>
-   </properties>
-@@ -43,7 +43,7 @@
- 
-     <dependency>
-       <groupId>${project.groupId}</groupId>
--      <artifactId>livy-repl_2.11</artifactId>
-+      <artifactId>livy-repl_${scala.binary.version}</artifactId>
+@@ -47,6 +47,12 @@
        <version>${project.version}</version>
      </dependency>
  
-@@ -51,6 +51,79 @@
-       <groupId>${project.groupId}</groupId>
-       <artifactId>livy-server</artifactId>
-       <version>${project.version}</version>
-+      <exclusions>
-+        <exclusion>
-+          <groupId>${project.groupId}</groupId>
-+          <artifactId>livy-core_2.11</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>${project.groupId}</groupId>
-+          <artifactId>livy-core_2.12</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>org.json4s</groupId>
-+          <artifactId>*</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>org.scalatra</groupId>
-+          <artifactId>*</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>com.fasterxml.jackson.module</groupId>
-+          <artifactId>jackson-module-scala_2.11</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>com.fasterxml.jackson.module</groupId>
-+          <artifactId>jackson-module-scala_2.12</artifactId>
-+        </exclusion>
-+      </exclusions>
-+    </dependency>
-+
 +    <dependency>
 +      <groupId>${project.groupId}</groupId>
-+      <artifactId>livy-core_${scala.binary.version}</artifactId>
++      <artifactId>livy-repl_2.12</artifactId>
 +      <version>${project.version}</version>
 +    </dependency>
 +
-+    <dependency>
-+      <groupId>org.json4s</groupId>
-+      <artifactId>json4s-ast_${scala.binary.version}</artifactId>
-+    </dependency>
-+
-+    <dependency>
-+      <groupId>org.json4s</groupId>
-+      <artifactId>json4s-core_${scala.binary.version}</artifactId>
-+    </dependency>
-+
-+    <dependency>
-+      <groupId>org.json4s</groupId>
-+      <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-+    </dependency>
-+    <dependency>
-+      <groupId>org.scalatra</groupId>
-+      <artifactId>scalatra_${scala.binary.version}</artifactId>
-+    </dependency>
+     <dependency>
+       <groupId>${project.groupId}</groupId>
+       <artifactId>livy-server</artifactId>
+@@ -75,6 +81,23 @@
+           </execution>
+         </executions>
+       </plugin>
 +
-+    <dependency>
-+      <groupId>org.scalatra</groupId>
-+      <artifactId>scalatra-json_${scala.binary.version}</artifactId>
-+    </dependency>
++      <plugin>
++        <groupId>org.apache.maven.plugins</groupId>
++        <artifactId>maven-surefire-plugin</artifactId>
++        <configuration>
++          <skipTests>true</skipTests>
++        </configuration>
++      </plugin>
 +
-+    <dependency>
-+      <groupId>org.scalatra</groupId>
-+      <artifactId>scalatra-metrics_${scala.binary.version}</artifactId>
-+      <version>${scalatra.version}</version>
-+      <exclusions>
-+        <exclusion>
-+          <groupId>com.typesafe.akka</groupId>
-+          <artifactId>akka-actor_${scala.binary.version}</artifactId>
-+        </exclusion>
-+      </exclusions>
-+    </dependency>
++      <plugin>
++        <groupId>org.scalatest</groupId>
++        <artifactId>scalatest-maven-plugin</artifactId>
++        <configuration>
++          <skipTests>true</skipTests>
++        </configuration>
++      </plugin>
 +
-+    <dependency>
-+      <groupId>com.fasterxml.jackson.module</groupId>
-+      <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
-     </dependency>
-   </dependencies>
+     </plugins>
+   </build>
  
 diff --git a/client-common/pom.xml b/client-common/pom.xml
-index 8bc52b9..0a72ebe 100644
+index 3897c2b..3864527 100644
 --- a/client-common/pom.xml
 +++ b/client-common/pom.xml
 @@ -36,7 +36,7 @@
@@ -155,7 +122,7 @@ index 8bc52b9..0a72ebe 100644
      </dependency>
      <dependency>
 diff --git a/client-common/src/main/java/org/apache/livy/client/common/Serializer.java b/client-common/src/main/java/org/apache/livy/client/common/Serializer.java
-index 3ea3f56..9d226e4 100644
+index 3ea3f56..2f879ac 100644
 --- a/client-common/src/main/java/org/apache/livy/client/common/Serializer.java
 +++ b/client-common/src/main/java/org/apache/livy/client/common/Serializer.java
 @@ -23,7 +23,8 @@ import java.nio.ByteBuffer;
@@ -174,12 +141,71 @@ index 3ea3f56..9d226e4 100644
          }
 -        kryo.setInstantiatorStrategy(new StdInstantiatorStrategy());
 +        kryo.setInstantiatorStrategy(new Kryo.DefaultInstantiatorStrategy(
-+                new StdInstantiatorStrategy()));
++          new StdInstantiatorStrategy()));
 +        kryo.register(java.lang.invoke.SerializedLambda.class);
 +        kryo.register(ClosureSerializer.Closure.class, new ClosureSerializer());
          kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
          return kryo;
        }
+diff --git a/core/scala-2.12/pom.xml b/core/scala-2.12/pom.xml
+new file mode 100644
+index 0000000..5034774
+--- /dev/null
++++ b/core/scala-2.12/pom.xml
+@@ -0,0 +1,53 @@
++<?xml version="1.0" encoding="UTF-8"?>
++<!--
++  ~ Licensed to the Apache Software Foundation (ASF) under one or more
++  ~ contributor license agreements.  See the NOTICE file distributed with
++  ~ this work for additional information regarding copyright ownership.
++  ~ The ASF licenses this file to You under the Apache License, Version 2.0
++  ~ (the "License"); you may not use this file except in compliance with
++  ~ the License.  You may obtain a copy of the License at
++  ~
++  ~    http://www.apache.org/licenses/LICENSE-2.0
++  ~
++  ~ Unless required by applicable law or agreed to in writing, software
++  ~ distributed under the License is distributed on an "AS IS" BASIS,
++  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++  ~ See the License for the specific language governing permissions and
++  ~ limitations under the License.
++-->
++<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
++  <modelVersion>4.0.0</modelVersion>
++  <groupId>org.apache.livy</groupId>
++  <artifactId>livy-core_2.12</artifactId>
++  <version>0.7.1-incubating</version>
++  <packaging>jar</packaging>
++
++  <parent>
++    <groupId>org.apache.livy</groupId>
++    <artifactId>livy-core-parent</artifactId>
++    <version>0.7.1-incubating</version>
++    <relativePath>../pom.xml</relativePath>
++  </parent>
++
++  <properties>
++    <scala.version>${scala-2.12.version}</scala.version>
++    <scala.binary.version>2.12</scala.binary.version>
++  </properties>
++
++  <build>
++    <plugins>
++      <plugin>
++        <groupId>org.apache.maven.plugins</groupId>
++        <artifactId>maven-jar-plugin</artifactId>
++        <executions>
++          <execution>
++            <goals>
++              <goal>test-jar</goal>
++            </goals>
++          </execution>
++        </executions>
++      </plugin>
++    </plugins>
++  </build>
++
++</project>
 diff --git a/core/src/test/scala/org/apache/livy/LivyBaseUnitTestSuite.scala b/core/src/test/scala/org/apache/livy/LivyBaseUnitTestSuite.scala
 index 908172b..65c5cd8 100644
 --- a/core/src/test/scala/org/apache/livy/LivyBaseUnitTestSuite.scala
@@ -197,112 +223,70 @@ index 908172b..65c5cd8 100644
    protected override def withFixture(test: NoArgTest): Outcome = {
      val testName = test.name
 diff --git a/coverage/pom.xml b/coverage/pom.xml
-index e4c508a..17682f3 100644
+index 9c23dca..495b86c 100644
 --- a/coverage/pom.xml
 +++ b/coverage/pom.xml
-@@ -52,31 +52,98 @@
- 
-     <dependency>
-       <groupId>${project.groupId}</groupId>
--      <artifactId>livy-core_2.11</artifactId>
-+      <artifactId>livy-repl_${scala.binary.version}</artifactId>
-       <version>${project.version}</version>
-     </dependency>
- 
-     <dependency>
-       <groupId>${project.groupId}</groupId>
--      <artifactId>livy-repl_2.11</artifactId>
-+      <artifactId>livy-rsc</artifactId>
+@@ -56,12 +56,24 @@
        <version>${project.version}</version>
      </dependency>
  
++    <dependency>
++      <groupId>${project.groupId}</groupId>
++      <artifactId>livy-core_2.12</artifactId>
++      <version>${project.version}</version>
++    </dependency>
++
      <dependency>
        <groupId>${project.groupId}</groupId>
--      <artifactId>livy-rsc</artifactId>
-+      <artifactId>livy-server</artifactId>
+       <artifactId>livy-repl_2.11</artifactId>
        <version>${project.version}</version>
-+      <exclusions>
-+        <exclusion>
-+          <groupId>${project.groupId}</groupId>
-+          <artifactId>livy-core_2.11</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>${project.groupId}</groupId>
-+          <artifactId>livy-core_2.12</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>org.json4s</groupId>
-+          <artifactId>*</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>org.scalatra</groupId>
-+          <artifactId>*</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>com.fasterxml.jackson.module</groupId>
-+          <artifactId>jackson-module-scala_2.11</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>com.fasterxml.jackson.module</groupId>
-+          <artifactId>jackson-module-scala_2.12</artifactId>
-+        </exclusion>
-+      </exclusions>
      </dependency>
  
++    <dependency>
++      <groupId>${project.groupId}</groupId>
++      <artifactId>livy-repl_2.12</artifactId>
++      <version>${project.version}</version>
++    </dependency>
++
      <dependency>
        <groupId>${project.groupId}</groupId>
--      <artifactId>livy-server</artifactId>
-+      <artifactId>livy-core_${scala.binary.version}</artifactId>
+       <artifactId>livy-rsc</artifactId>
+@@ -80,6 +92,12 @@
        <version>${project.version}</version>
      </dependency>
  
 +    <dependency>
-+      <groupId>org.json4s</groupId>
-+      <artifactId>json4s-ast_${scala.binary.version}</artifactId>
-+    </dependency>
-+
-+    <dependency>
-+      <groupId>org.json4s</groupId>
-+      <artifactId>json4s-core_${scala.binary.version}</artifactId>
-+    </dependency>
-+
-+    <dependency>
-+      <groupId>org.json4s</groupId>
-+      <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-+    </dependency>
-+    <dependency>
-+      <groupId>org.scalatra</groupId>
-+      <artifactId>scalatra_${scala.binary.version}</artifactId>
++      <groupId>${project.groupId}</groupId>
++      <artifactId>livy-scala-api_2.12</artifactId>
++      <version>${project.version}</version>
 +    </dependency>
 +
-+    <dependency>
-+      <groupId>org.scalatra</groupId>
-+      <artifactId>scalatra-json_${scala.binary.version}</artifactId>
-+    </dependency>
+     <dependency>
+       <groupId>${project.groupId}</groupId>
+       <artifactId>livy-integration-test</artifactId>
+@@ -116,6 +134,23 @@
+           </execution>
+         </executions>
+       </plugin>
 +
-+    <dependency>
-+      <groupId>org.scalatra</groupId>
-+      <artifactId>scalatra-metrics_${scala.binary.version}</artifactId>
-+      <version>${scalatra.version}</version>
-+      <exclusions>
-+        <exclusion>
-+          <groupId>com.typesafe.akka</groupId>
-+          <artifactId>akka-actor_${scala.binary.version}</artifactId>
-+        </exclusion>
-+      </exclusions>
-+    </dependency>
++      <plugin>
++        <groupId>org.apache.maven.plugins</groupId>
++        <artifactId>maven-surefire-plugin</artifactId>
++        <configuration>
++          <skipTests>true</skipTests>
++        </configuration>
++      </plugin>
 +
-+    <dependency>
-+      <groupId>com.fasterxml.jackson.module</groupId>
-+      <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
-+    </dependency>
++      <plugin>
++        <groupId>org.scalatest</groupId>
++        <artifactId>scalatest-maven-plugin</artifactId>
++        <configuration>
++          <skipTests>true</skipTests>
++        </configuration>
++      </plugin>
 +
-     <dependency>
-       <groupId>${project.groupId}</groupId>
--      <artifactId>livy-scala-api_2.11</artifactId>
-+      <artifactId>livy-scala-api_${scala.binary.version}</artifactId>
-       <version>${project.version}</version>
-     </dependency>
+     </plugins>
+   </build>
  
 diff --git a/examples/src/main/scala/org/apache/livy/examples/WordCountApp.scala b/examples/src/main/scala/org/apache/livy/examples/WordCountApp.scala
 index da30a76..2285f52 100644
@@ -318,176 +302,59 @@ index da30a76..2285f52 100644
          "group by word order by word_count desc limit 1")
        result.first().toString()
 diff --git a/integration-test/pom.xml b/integration-test/pom.xml
-index f652c64..5fa4a92 100644
+index 80a9c29..e36531d 100644
 --- a/integration-test/pom.xml
 +++ b/integration-test/pom.xml
-@@ -49,10 +49,50 @@
-       <version>${project.version}</version>
-     </dependency>
+@@ -297,6 +297,7 @@
+             <LIVY_INTEGRATION_TEST>true</LIVY_INTEGRATION_TEST>
+             <SPARK_HOME>${project.build.directory}/${spark.bin.name}</SPARK_HOME>
+             <LIVY_TEST_THRIFT_ENABLED>${livy.test.thrift.enabled}</LIVY_TEST_THRIFT_ENABLED>
++            <PYSPARK_ROW_FIELD_SORTING_ENABLED>true</PYSPARK_ROW_FIELD_SORTING_ENABLED>
+           </environmentVariables>
+           <systemProperties>
+             <cluster.spec>${cluster.spec}</cluster.spec>
+diff --git a/integration-test/src/test/resources/rtest.R b/integration-test/src/test/resources/rtest.R
+index a026a10..d955b7f 100644
+--- a/integration-test/src/test/resources/rtest.R
++++ b/integration-test/src/test/resources/rtest.R
+@@ -17,18 +17,17 @@
  
-+
-+    <dependency>
-+      <groupId>${project.groupId}</groupId>
-+      <artifactId>livy-rsc</artifactId>
-+      <version>${project.version}</version>
-+    </dependency>
-+
-+
-     <dependency>
-       <groupId>${project.groupId}</groupId>
-       <artifactId>livy-server</artifactId>
-       <version>${project.version}</version>
-+      <exclusions>
-+        <exclusion>
-+          <groupId>${project.groupId}</groupId>
-+          <artifactId>livy-core_2.11</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>${project.groupId}</groupId>
-+          <artifactId>livy-core_2.12</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>org.json4s</groupId>
-+          <artifactId>*</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>org.scalatra</groupId>
-+          <artifactId>*</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>com.fasterxml.jackson.module</groupId>
-+          <artifactId>jackson-module-scala_2.11</artifactId>
-+        </exclusion>
-+        <exclusion>
-+          <groupId>com.fasterxml.jackson.module</groupId>
-+          <artifactId>jackson-module-scala_2.12</artifactId>
-+        </exclusion>
-+      </exclusions>
-+    </dependency>
-+
-+    <dependency>
-+      <groupId>${project.groupId}</groupId>
-+      <artifactId>livy-core_${scala.binary.version}</artifactId>
-+      <version>${project.version}</version>
-     </dependency>
+ library(SparkR)
  
-     <dependency>
-@@ -69,6 +109,11 @@
-       <scope>test</scope>
-     </dependency>
+-# Initialize SparkContext and SQLContext
+-sc <- sparkR.init(appName="SparkR-DataFrame-example")
+-sqlContext <- sparkRSQL.init(sc)
++# Initialize SparkSession
++sparkR.session(appName = "SparkR-DataFrame-example")
  
-+    <dependency>
-+      <groupId>org.json4s</groupId>
-+      <artifactId>json4s-ast_${scala.binary.version}</artifactId>
-+    </dependency>
-+
-     <dependency>
-       <groupId>com.fasterxml.jackson.core</groupId>
-       <artifactId>jackson-core</artifactId>
-@@ -79,11 +124,56 @@
-       <artifactId>jackson-databind</artifactId>
-     </dependency>
+ # Create a simple local data.frame
+ localDF <- data.frame(name=c("John", "Smith", "Sarah"), age=c(19, 23, 18))
  
-+    <dependency>
-+      <groupId>org.json4s</groupId>
-+      <artifactId>json4s-core_${scala.binary.version}</artifactId>
-+    </dependency>
+ # Convert local data frame to a SparkDataFrame
+-df <- createDataFrame(sqlContext, localDF)
++df <- createDataFrame(localDF)
+ 
+ # Print its schema
+ printSchema(df)
+ 
+ # Stop the SparkContext now
+-sparkR.stop()
++sparkR.session.stop()
+diff --git a/integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala b/integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala
+index 0c3d632..d1fa5e8 100644
+--- a/integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala
++++ b/integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala
+@@ -37,7 +37,8 @@ class InteractiveIT extends BaseIntegrationTestSuite {
+       s.run("val scalaVersion = util.Properties.versionString").result().left.foreach(info(_))
+       s.run("1+1").verifyResult("res0: Int = 2\n")
+       s.run("""sc.getConf.get("spark.executor.instances")""").verifyResult("res1: String = 1\n")
+-      s.run("val sql = new org.apache.spark.sql.SQLContext(sc)").verifyResult(
 +
-+    <dependency>
-+      <groupId>org.json4s</groupId>
-+      <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-+    </dependency>
-+    <dependency>
-+      <groupId>org.scalatra</groupId>
-+      <artifactId>scalatra_${scala.binary.version}</artifactId>
-+    </dependency>
-+
-+    <dependency>
-+      <groupId>org.scalatra</groupId>
-+      <artifactId>scalatra-json_${scala.binary.version}</artifactId>
-+    </dependency>
-+
-+    <dependency>
-+      <groupId>org.scalatra</groupId>
-+      <artifactId>scalatra-metrics_${scala.binary.version}</artifactId>
-+      <version>${scalatra.version}</version>
-+      <exclusions>
-+        <exclusion>
-+          <groupId>com.typesafe.akka</groupId>
-+          <artifactId>akka-actor_${scala.binary.version}</artifactId>
-+        </exclusion>
-+      </exclusions>
-+    </dependency>
-+
-     <dependency>
-       <groupId>com.fasterxml.jackson.module</groupId>
-       <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
-     </dependency>
- 
-+    <dependency>
-+      <groupId>${project.groupId}</groupId>
-+      <artifactId>livy-client-http</artifactId>
-+      <version>${project.version}</version>
-+      <scope>test</scope>
-+    </dependency>
-+
-+    <dependency>
-+      <groupId>${project.groupId}</groupId>
-+      <artifactId>livy-test-lib</artifactId>
-+      <version>${project.version}</version>
-+      <scope>test</scope>
-+    </dependency>
-+
-     <dependency>
-       <groupId>com.ning</groupId>
-       <artifactId>async-http-client</artifactId>
-diff --git a/integration-test/src/test/resources/rtest.R b/integration-test/src/test/resources/rtest.R
-index a026a10..d6cde73 100644
---- a/integration-test/src/test/resources/rtest.R
-+++ b/integration-test/src/test/resources/rtest.R
-@@ -18,14 +18,13 @@
- library(SparkR)
- 
- # Initialize SparkContext and SQLContext
--sc <- sparkR.init(appName="SparkR-DataFrame-example")
--sqlContext <- sparkRSQL.init(sc)
-+sc <- sparkR.session(appName="SparkR-DataFrame-example")
- 
- # Create a simple local data.frame
- localDF <- data.frame(name=c("John", "Smith", "Sarah"), age=c(19, 23, 18))
- 
- # Convert local data frame to a SparkDataFrame
--df <- createDataFrame(sqlContext, localDF)
-+df <- createDataFrame(localDF)
- 
- # Print its schema
- printSchema(df)
-diff --git a/integration-test/src/test/scala/org/apache/livy/test/BatchIT.scala b/integration-test/src/test/scala/org/apache/livy/test/BatchIT.scala
-index a6f4e73..76828e7 100644
---- a/integration-test/src/test/scala/org/apache/livy/test/BatchIT.scala
-+++ b/integration-test/src/test/scala/org/apache/livy/test/BatchIT.scala
-@@ -76,6 +76,7 @@ class BatchIT extends BaseIntegrationTestSuite with BeforeAndAfterAll {
-   }
- 
-   test("submit a SparkR application") {
-+    assume(!sys.props.getOrElse("skipRTests", "false").toBoolean, "Skipping R tests.")
-     val hdfsPath = uploadResource("rtest.R")
-     withScript(hdfsPath, List.empty) { s =>
-       s.verifySessionSuccess()
-diff --git a/integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala b/integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala
-index 0c3d632..79e4b50 100644
---- a/integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala
-+++ b/integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala
-@@ -37,7 +37,7 @@ class InteractiveIT extends BaseIntegrationTestSuite {
-       s.run("val scalaVersion = util.Properties.versionString").result().left.foreach(info(_))
-       s.run("1+1").verifyResult("res0: Int = 2\n")
-       s.run("""sc.getConf.get("spark.executor.instances")""").verifyResult("res1: String = 1\n")
--      s.run("val sql = new org.apache.spark.sql.SQLContext(sc)").verifyResult(
 +      s.run("val sql = spark.sqlContext").verifyResult(
          ".*" + Pattern.quote(
          "sql: org.apache.spark.sql.SQLContext = org.apache.spark.sql.SQLContext") + ".*")
        s.run("abcde").verifyError(evalue = ".*?:[0-9]+: error: not found: value abcde.*")
-@@ -47,7 +47,7 @@ class InteractiveIT extends BaseIntegrationTestSuite {
+@@ -47,7 +48,7 @@ class InteractiveIT extends BaseIntegrationTestSuite {
        // Verify query submission
        s.run(s"""val df = spark.createDataFrame(Seq(("jerry", 20), ("michael", 21)))""")
          .verifyResult(".*" + Pattern.quote("df: org.apache.spark.sql.DataFrame") + ".*")
@@ -496,27 +363,97 @@ index 0c3d632..79e4b50 100644
        s.run("SELECT * FROM people", Some(SQL)).verifyResult(".*\"jerry\",20.*\"michael\",21.*")
  
        // Verify Livy internal configurations are not exposed.
-@@ -95,6 +95,7 @@ class InteractiveIT extends BaseIntegrationTestSuite {
+@@ -104,7 +105,7 @@ class InteractiveIT extends BaseIntegrationTestSuite {
+       s.run("1+1").verifyResult(startsWith(s"[$count] 2"))
+       s.run("""localDF <- data.frame(name=c("John", "Smith", "Sarah"), age=c(19, 23, 18))""")
+         .verifyResult(null)
+-      s.run("df <- createDataFrame(sqlContext, localDF)").verifyResult(null)
++      s.run("df <- createDataFrame(localDF)").verifyResult(null)
+       s.run("printSchema(df)").verifyResult(literal(
+         """|root
+           | |-- name: string (nullable = true)
+diff --git a/integration-test/src/test/spark2/scala/Spark2JobApiIT.scala b/integration-test/src/test/spark2/scala/Spark2JobApiIT.scala
+index 20ebf3d..441b103 100644
+--- a/integration-test/src/test/spark2/scala/Spark2JobApiIT.scala
++++ b/integration-test/src/test/spark2/scala/Spark2JobApiIT.scala
+@@ -22,6 +22,8 @@ import java.net.URI
+ import java.util.concurrent.{TimeUnit, Future => JFuture}
+ import javax.servlet.http.HttpServletResponse
+ 
++import scala.util.Properties
++
+ import com.fasterxml.jackson.databind.ObjectMapper
+ import com.fasterxml.jackson.module.scala.DefaultScalaModule
+ import org.scalatest.BeforeAndAfterAll
+@@ -31,6 +33,7 @@ import org.apache.livy.client.common.HttpMessages._
+ import org.apache.livy.sessions.SessionKindModule
+ import org.apache.livy.test.framework.BaseIntegrationTestSuite
+ import org.apache.livy.test.jobs.spark2._
++import org.apache.livy.utils.LivySparkUtils
+ 
+ class Spark2JobApiIT extends BaseIntegrationTestSuite with BeforeAndAfterAll with Logging {
+ 
+@@ -50,7 +53,8 @@ class Spark2JobApiIT extends BaseIntegrationTestSuite with BeforeAndAfterAll wit
+     livyClient.connectSession(sessionId).stop()
    }
  
-   test("R interactive session") {
-+    assume(!sys.props.getOrElse("skipRTests", "false").toBoolean, "Skipping R tests.")
-     withNewSession(SparkR) { s =>
-       // R's output sometimes includes the count of statements, which makes it annoying to test
-       // things. This helps a bit.
+-  test("create a new session and upload test jar") {
++  scalaTest("create a new session and upload test jar") {
++    val prevSessionCount = sessionList().total
+     val tempClient = createClient(livyEndpoint)
+ 
+     try {
+@@ -78,13 +82,13 @@ class Spark2JobApiIT extends BaseIntegrationTestSuite with BeforeAndAfterAll wit
+     }
+   }
+ 
+-  test("run spark2 job") {
++  scalaTest("run spark2 job") {
+     assume(client != null, "Client not active.")
+     val result = waitFor(client.submit(new SparkSessionTest()))
+     assert(result === 3)
+   }
+ 
+-  test("run spark2 dataset job") {
++  scalaTest("run spark2 dataset job") {
+     assume(client != null, "Client not active.")
+     val result = waitFor(client.submit(new DatasetTest()))
+     assert(result === 2)
+@@ -103,4 +107,18 @@ class Spark2JobApiIT extends BaseIntegrationTestSuite with BeforeAndAfterAll wit
+   private def createClient(uri: String): LivyClient = {
+     new LivyClientBuilder().setURI(new URI(uri)).build()
+   }
++
++  protected def scalaTest(desc: String)(testFn: => Unit): Unit = {
++    test(desc) {
++      val livyConf = new LivyConf()
++      val (sparkVersion, scalaVersion) = LivySparkUtils.sparkSubmitVersion(livyConf)
++      val formattedSparkVersion = LivySparkUtils.formatSparkVersion(sparkVersion)
++      val versionString =
++        LivySparkUtils.sparkScalaVersion(formattedSparkVersion, scalaVersion, livyConf)
++
++      assume(versionString == LivySparkUtils.formatScalaVersion(Properties.versionNumberString),
++        s"Scala test can only be run with ${Properties.versionString}")
++      testFn
++    }
++  }
+ }
 diff --git a/pom.xml b/pom.xml
-index 6b4d5a4..513a35e 100644
+index 20b1a55..9b76e78 100644
 --- a/pom.xml
 +++ b/pom.xml
-@@ -78,6 +78,7 @@
+@@ -78,40 +78,46 @@
    </mailingLists>
  
    <properties>
-+    <asynchttpclient.version>2.0.23</asynchttpclient.version>
++    <asynchttpclient.version>2.10.1</asynchttpclient.version>
      <hadoop.version>2.7.3</hadoop.version>
      <hadoop.scope>compile</hadoop.scope>
-     <spark.scala-2.11.version>2.2.3</spark.scala-2.11.version>
-@@ -86,24 +87,25 @@
+-    <spark.scala-2.11.version>2.2.3</spark.scala-2.11.version>
++    <spark.scala-2.11.version>2.4.5</spark.scala-2.11.version>
++    <spark.scala-2.12.version>2.4.5</spark.scala-2.12.version>
+     <spark.version>${spark.scala-2.11.version}</spark.version>
+     <hive.version>3.0.0</hive.version>
      <commons-codec.version>1.9</commons-codec.version>
      <httpclient.version>4.5.3</httpclient.version>
      <httpcore.version>4.4.4</httpcore.version>
@@ -524,15 +461,20 @@ index 6b4d5a4..513a35e 100644
 +    <jackson.version>2.10.1</jackson.version>
      <javax.servlet-api.version>3.1.0</javax.servlet-api.version>
      <jetty.version>9.3.24.v20180605</jetty.version>
-     <json4s.version>3.2.11</json4s.version>
+-    <json4s.version>3.2.11</json4s.version>
++    <json4s.spark-2.11.version>3.5.3</json4s.spark-2.11.version>
++    <json4s.spark-2.12.version>3.5.3</json4s.spark-2.12.version>
++    <json4s.version>${json4s.spark-2.11.version}</json4s.version>
      <junit.version>4.11</junit.version>
      <libthrift.version>0.9.3</libthrift.version>
 -    <kryo.version>2.22</kryo.version>
 +    <kryo.version>4.0.2</kryo.version>
      <metrics.version>3.1.0</metrics.version>
 -    <mockito.version>1.9.5</mockito.version>
+-    <netty.spark-2.11.version>4.0.37.Final</netty.spark-2.11.version>
 +    <mockito.version>1.10.19</mockito.version>
-     <netty.spark-2.11.version>4.0.37.Final</netty.spark-2.11.version>
++    <netty.spark-2.11.version>4.1.17.Final</netty.spark-2.11.version>
++    <netty.spark-2.12.version>4.1.17.Final</netty.spark-2.12.version>
      <netty.version>${netty.spark-2.11.version}</netty.version>
      <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
      <py4j.version>0.10.7</py4j.version>
@@ -547,55 +489,36 @@ index 6b4d5a4..513a35e 100644
      <java.version>1.8</java.version>
      <test.redirectToFile>true</test.redirectToFile>
      <execution.root>${user.dir}</execution.root>
-@@ -121,6 +123,9 @@
-     <!-- Set this to "true" to skip PySpark3 tests. -->
-     <skipPySpark3Tests>false</skipPySpark3Tests>
- 
-+    <!-- Set this to "true" to skip Python 2 tests. -->
-+    <skipPython2Tests>false</skipPython2Tests>
-+
-     <!-- Required for testing LDAP integration -->
-     <apacheds.version>2.0.0-M21</apacheds.version>
-     <ldap-api.version>1.0.0-M33</ldap-api.version>
-@@ -190,6 +195,17 @@
-         <enabled>false</enabled>
-       </snapshots>
-     </repository>
-+    <repository>
-+      <id>apache-staging</id>
-+      <name>Apache Repository</name>
-+      <url>https://repository.apache.org/content/groups/staging/</url>
-+      <releases>
-+        <enabled>true</enabled>
-+      </releases>
-+      <snapshots>
-+        <enabled>false</enabled>
-+      </snapshots>
-+    </repository>
-   </repositories>
- 
-   <modules>
-@@ -198,16 +214,16 @@
-     <module>client-common</module>
+     <spark.home>${execution.root}/dev/spark</spark.home>
+     <spark.bin.download.url>
+-      https://archive.apache.org/dist/spark/spark-2.2.3/spark-2.2.3-bin-hadoop2.7.tgz
++      https://archive.apache.org/dist/spark/spark-2.4.5/spark-2.4.5-bin-hadoop2.7.tgz
+     </spark.bin.download.url>
+-    <spark.bin.name>spark-2.2.3-bin-hadoop2.7</spark.bin.name>
++    <spark.bin.name>spark-2.4.5-bin-hadoop2.7</spark.bin.name>
+     <!--  used for testing, NCSARequestLog use it for access log  -->
+     <livy.log.dir>${basedir}/target</livy.log.dir>
+ 
+@@ -199,15 +205,18 @@
      <module>client-http</module>
      <module>core</module>
--    <module>core/scala-2.11</module>
-+    <module>core/scala-${scala.binary.version}</module>
+     <module>core/scala-2.11</module>
++    <module>core/scala-2.12</module>
      <module>coverage</module>
      <module>examples</module>
      <module>python-api</module>
      <module>repl</module>
--    <module>repl/scala-2.11</module>
-+    <module>repl/scala-${scala.binary.version}</module>
+     <module>repl/scala-2.11</module>
++    <module>repl/scala-2.12</module>
      <module>rsc</module>
      <module>scala</module>
      <module>scala-api</module>
--    <module>scala-api/scala-2.11</module>
-+    <module>scala-api/scala-${scala.binary.version}</module>
+     <module>scala-api/scala-2.11</module>
++    <module>scala-api/scala-2.12</module>
      <module>server</module>
      <module>test-lib</module>
      <module>integration-test</module>
-@@ -240,6 +256,12 @@
+@@ -240,6 +249,12 @@
        <artifactId>scalatra-scalatest_${scala.binary.version}</artifactId>
        <version>${scalatra.version}</version>
        <scope>test</scope>
@@ -608,7 +531,7 @@ index 6b4d5a4..513a35e 100644
      </dependency>
  
    </dependencies>
-@@ -254,7 +276,7 @@
+@@ -254,7 +269,7 @@
        </dependency>
  
        <dependency>
@@ -617,7 +540,7 @@ index 6b4d5a4..513a35e 100644
          <artifactId>kryo</artifactId>
          <version>${kryo.version}</version>
        </dependency>
-@@ -545,6 +567,12 @@
+@@ -545,6 +560,12 @@
          <version>${scalatra.version}</version>
        </dependency>
  
@@ -630,54 +553,46 @@ index 6b4d5a4..513a35e 100644
        <dependency>
          <groupId>org.scalatra</groupId>
          <artifactId>scalatra-test_${scala.binary.version}</artifactId>
-@@ -556,6 +584,11 @@
-         <artifactId>py4j</artifactId>
+@@ -557,6 +578,12 @@
          <version>${py4j.version}</version>
        </dependency>
+ 
 +      <dependency>
 +        <groupId>org.asynchttpclient</groupId>
 +        <artifactId>async-http-client</artifactId>
 +        <version>${asynchttpclient.version}</version>
 +      </dependency>
- 
++
        <!-- we need a version > 1.7.13 because of SLF4J-324 -->
        <dependency>
-@@ -611,7 +644,7 @@
+         <groupId>org.slf4j</groupId>
+@@ -611,7 +638,7 @@
          <plugin>
            <groupId>net.alchim31.maven</groupId>
            <artifactId>scala-maven-plugin</artifactId>
 -          <version>3.2.2</version>
-+          <version>4.4.0</version>
++          <version>4.3.0</version>
            <executions>
              <execution>
                <goals>
-@@ -629,8 +662,6 @@
-           </executions>
+@@ -630,7 +657,6 @@
            <configuration>
              <scalaVersion>${scala.version}</scalaVersion>
--            <recompileMode>incremental</recompileMode>
+             <recompileMode>incremental</recompileMode>
 -            <useZincServer>true</useZincServer>
              <checkMultipleScalaVersions>false</checkMultipleScalaVersions>
              <args>
                <arg>-unchecked</arg>
-@@ -661,7 +692,7 @@
+@@ -661,7 +687,7 @@
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
 -          <version>2.4.2</version>
-+          <version>3.2.4</version>
++          <version>3.2.1</version>
          </plugin>
  
          <plugin>
-@@ -682,6 +713,7 @@
-               <project.version>${project.version}</project.version>
-               <skipRTests>${skipRTests}</skipRTests>
-               <skipPySpark3Tests>${skipPySpark3Tests}</skipPySpark3Tests>
-+              <skipPython2Tests>${skipPython2Tests}</skipPython2Tests>
-             </systemProperties>
-             <redirectTestOutputToFile>${test.redirectToFile}</redirectTestOutputToFile>
-             <useFile>${test.redirectToFile}</useFile>
-@@ -693,7 +725,7 @@
+@@ -693,7 +719,7 @@
          <plugin>
            <groupId>org.scalatest</groupId>
            <artifactId>scalatest-maven-plugin</artifactId>
@@ -686,65 +601,59 @@ index 6b4d5a4..513a35e 100644
            <configuration>
              <environmentVariables>
                <LIVY_TEST>true</LIVY_TEST>
-@@ -711,6 +743,7 @@
-               <project.version>${project.version}</project.version>
-               <skipRTests>${skipRTests}</skipRTests>
-               <skipPySpark3Tests>${skipPySpark3Tests}</skipPySpark3Tests>
-+              <skipPython2Tests>${skipPython2Tests}</skipPython2Tests>
-             </systemProperties>
-             <stdout>D</stdout>
-             <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-@@ -1032,6 +1065,7 @@
-         <spark.scala-2.11.version>2.3.3</spark.scala-2.11.version>
-         <spark.version>${spark.scala-2.11.version}</spark.version>
-         <netty.spark-2.11.version>4.1.17.Final</netty.spark-2.11.version>
-+        <asynchttpclient.version>2.10.1</asynchttpclient.version>
-         <spark.bin.download.url>
-           https://archive.apache.org/dist/spark/spark-2.3.3/spark-2.3.3-bin-hadoop2.7.tgz
-         </spark.bin.download.url>
-@@ -1052,6 +1086,7 @@
-         <netty.spark-2.11.version>4.1.17.Final</netty.spark-2.11.version>
-         <java.version>1.8</java.version>
-         <py4j.version>0.10.7</py4j.version>
-+        <asynchttpclient.version>2.10.1</asynchttpclient.version>
-         <spark.bin.download.url>
-           https://archive.apache.org/dist/spark/spark-2.4.3/spark-2.4.3-bin-hadoop2.7.tgz
-         </spark.bin.download.url>
-@@ -1059,6 +1094,32 @@
-       </properties>
+@@ -1025,37 +1051,29 @@
+       </modules>
      </profile>
  
-+    <profile>
+-    <!-- Spark version profiles -->
+-    <profile>
+-      <id>spark-2.3</id>
+-      <properties>
+-        <spark.scala-2.11.version>2.3.3</spark.scala-2.11.version>
+-        <spark.version>${spark.scala-2.11.version}</spark.version>
+-        <netty.spark-2.11.version>4.1.17.Final</netty.spark-2.11.version>
+-        <spark.bin.download.url>
+-          https://archive.apache.org/dist/spark/spark-2.3.3/spark-2.3.3-bin-hadoop2.7.tgz
+-        </spark.bin.download.url>
+-        <spark.bin.name>spark-2.3.3-bin-hadoop2.7</spark.bin.name>
+-      </properties>
+-    </profile>
+-
+     <profile>
+-      <id>spark-2.4</id>
 +      <id>spark-3.0</id>
-+      <activation>
-+        <property>
+       <activation>
+         <property>
+-          <name>spark-2.4</name>
 +          <name>spark-3.0</name>
-+        </property>
-+      </activation>
-+      <properties>
+         </property>
+       </activation>
+       <properties>
+-        <spark.scala-2.11.version>2.4.3</spark.scala-2.11.version>
 +        <spark.scala-2.12.version>3.0.0</spark.scala-2.12.version>
-+        <spark.version>${spark.scala-2.12.version}</spark.version>
-+        <scala.binary.version>2.12</scala.binary.version>
-+        <scala.version>${scala-2.12.version}</scala.version>
++        <spark.scala-2.11.version>2.4.5</spark.scala-2.11.version>
+         <spark.version>${spark.scala-2.11.version}</spark.version>
+-        <netty.spark-2.11.version>4.1.17.Final</netty.spark-2.11.version>
 +        <netty.spark-2.12.version>4.1.47.Final</netty.spark-2.12.version>
-+        <netty.version>${netty.spark-2.12.version}</netty.version>
-+        <java.version>1.8</java.version>
++        <netty.spark-2.11.version>4.1.47.Final</netty.spark-2.11.version>
++        <netty.version>${netty.spark-2.11.version}</netty.version>
+         <java.version>1.8</java.version>
+-        <py4j.version>0.10.7</py4j.version>
 +        <py4j.version>0.10.9</py4j.version>
-+        <json4s.version>3.6.6</json4s.version>
-+        <asynchttpclient.version>2.10.1</asynchttpclient.version>
-+        <skipRTests>true</skipRTests>
-+        <spark.bin.download.url>
-+          https://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz
-+        </spark.bin.download.url>
-+        <spark.bin.name>spark-3.0.0-bin-hadoop2.7</spark.bin.name>
-+      </properties>
-+    </profile>
-+
-     <profile>
-       <id>skip-parent-modules</id>
-       <activation>
++        <json4s.spark-2.11.version>3.5.3</json4s.spark-2.11.version>
++        <json4s.spark-2.12.version>3.6.6</json4s.spark-2.12.version>
++        <json4s.version>${json4s.spark-2.11.version}</json4s.version>
+         <spark.bin.download.url>
+-          https://archive.apache.org/dist/spark/spark-2.4.3/spark-2.4.3-bin-hadoop2.7.tgz
++          https://archive.apache.org/dist/spark/spark-3.1.2/spark-3.1.2-bin-hadoop3.2.tgz
+         </spark.bin.download.url>
+-        <spark.bin.name>spark-2.4.3-bin-hadoop2.7</spark.bin.name>
++        <spark.bin.name>spark-3.1.2-bin-hadoop3.2</spark.bin.name>
+       </properties>
+     </profile>
+ 
 diff --git a/repl/pom.xml b/repl/pom.xml
-index e8cbc2d..ac7d6f9 100644
+index e01f7ab..b7e7fc5 100644
 --- a/repl/pom.xml
 +++ b/repl/pom.xml
 @@ -175,6 +175,7 @@
@@ -755,11 +664,32 @@ index e8cbc2d..ac7d6f9 100644
                  </includes>
                </artifactSet>
                <filters>
+@@ -211,6 +212,8 @@
+                 json4s-ast_${scala.binary.version},
+                 json4s-core_${scala.binary.version},
+                 json4s-jackson_${scala.binary.version},
++                json4s-scalap_${scala.binary.version},
++                scala-xml_${scala.binary.version},
+                 paranamer,
+                 scalap
+               </excludeArtifactIds>
+diff --git a/repl/scala-2.11/pom.xml b/repl/scala-2.11/pom.xml
+index 4b85232..9bacd33 100644
+--- a/repl/scala-2.11/pom.xml
++++ b/repl/scala-2.11/pom.xml
+@@ -36,6 +36,7 @@
+     <scala.binary.version>2.11</scala.binary.version>
+     <spark.version>${spark.scala-2.11.version}</spark.version>
+     <netty.version>${netty.spark-2.11.version}</netty.version>
++    <json4s.version>${json4s.spark-2.11.version}</json4s.version>
+   </properties>
+ 
+ </project>
 diff --git a/repl/scala-2.11/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala b/repl/scala-2.11/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala
-index 7de2859..4fbf665 100644
+index 7de2859..98c478f 100644
 --- a/repl/scala-2.11/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala
 +++ b/repl/scala-2.11/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala
-@@ -27,12 +27,10 @@ import scala.tools.nsc.interpreter.IMain
+@@ -27,15 +27,12 @@ import scala.tools.nsc.interpreter.IMain
  import scala.tools.nsc.interpreter.JLineCompletion
  import scala.tools.nsc.interpreter.JPrintWriter
  import scala.tools.nsc.interpreter.Results.Result
@@ -769,62 +699,295 @@ index 7de2859..4fbf665 100644
  import org.apache.spark.repl.SparkILoop
  
 -import org.apache.livy.rsc.driver.SparkEntries
- 
- /**
-  * This represents a Spark interpreter. It is not thread safe.
-diff --git a/repl/src/main/scala/org/apache/livy/repl/SparkRInterpreter.scala b/repl/src/main/scala/org/apache/livy/repl/SparkRInterpreter.scala
-index c74c8c8..4b6aa25 100644
---- a/repl/src/main/scala/org/apache/livy/repl/SparkRInterpreter.scala
-+++ b/repl/src/main/scala/org/apache/livy/repl/SparkRInterpreter.scala
-@@ -205,7 +205,6 @@ class SparkRInterpreter(
-         sendRequest("""assign(".sparkRsession", SparkR:::callJStatic("org.apache.livy.repl.SparkRInterpreter", "getSparkSession"), envir = SparkR:::.sparkREnv)""")
-         sendRequest("""assign("spark", get(".sparkRsession", envir = SparkR:::.sparkREnv), envir=.GlobalEnv)""")
-       }
 -
-       sendRequest("""assign(".sqlc", SparkR:::callJStatic("org.apache.livy.repl.SparkRInterpreter", "getSQLContext"), envir = SparkR:::.sparkREnv)""")
-       sendRequest("""assign("sqlContext", get(".sqlc", envir = SparkR:::.sparkREnv), envir = .GlobalEnv)""")
-       // scalastyle:on line.size.limit
-diff --git a/repl/src/test/scala/org/apache/livy/repl/PythonInterpreterSpec.scala b/repl/src/test/scala/org/apache/livy/repl/PythonInterpreterSpec.scala
-index 4a78c61..45527e9 100644
---- a/repl/src/test/scala/org/apache/livy/repl/PythonInterpreterSpec.scala
-+++ b/repl/src/test/scala/org/apache/livy/repl/PythonInterpreterSpec.scala
-@@ -255,6 +255,11 @@ class Python2InterpreterSpec extends PythonBaseInterpreterSpec {
- 
-   implicit val formats = DefaultFormats
- 
-+  override protected def withFixture(test: NoArgTest) = {
-+    assume(!sys.props.getOrElse("skipPython2Tests", "false").toBoolean, "Skipping Python2 tests.")
-+    test()
+ /**
+- * This represents a Spark interpreter. It is not thread safe.
++ * This represents a Scala 2.11 Spark interpreter. It is not thread safe.
+  */
+ class SparkInterpreter(protected override val conf: SparkConf) extends AbstractSparkInterpreter {
+ 
+diff --git a/repl/scala-2.12/pom.xml b/repl/scala-2.12/pom.xml
+new file mode 100644
+index 0000000..d17a521
+--- /dev/null
++++ b/repl/scala-2.12/pom.xml
+@@ -0,0 +1,42 @@
++<?xml version="1.0" encoding="UTF-8"?>
++<!--
++  ~ Licensed to the Apache Software Foundation (ASF) under one or more
++  ~ contributor license agreements.  See the NOTICE file distributed with
++  ~ this work for additional information regarding copyright ownership.
++  ~ The ASF licenses this file to You under the Apache License, Version 2.0
++  ~ (the "License"); you may not use this file except in compliance with
++  ~ the License.  You may obtain a copy of the License at
++  ~
++  ~    http://www.apache.org/licenses/LICENSE-2.0
++  ~
++  ~ Unless required by applicable law or agreed to in writing, software
++  ~ distributed under the License is distributed on an "AS IS" BASIS,
++  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++  ~ See the License for the specific language governing permissions and
++  ~ limitations under the License.
++-->
++<project xmlns="http://maven.apache.org/POM/4.0.0"
++         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
++         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
++  <modelVersion>4.0.0</modelVersion>
++  <groupId>org.apache.livy</groupId>
++  <artifactId>livy-repl_2.12</artifactId>
++  <version>0.7.1-incubating</version>
++  <packaging>jar</packaging>
++
++  <parent>
++    <groupId>org.apache.livy</groupId>
++    <artifactId>livy-repl-parent</artifactId>
++    <version>0.7.1-incubating</version>
++    <relativePath>../pom.xml</relativePath>
++  </parent>
++
++  <properties>
++    <scala.version>${scala-2.12.version}</scala.version>
++    <scala.binary.version>2.12</scala.binary.version>
++    <spark.version>${spark.scala-2.12.version}</spark.version>
++    <netty.version>${netty.spark-2.12.version}</netty.version>
++    <json4s.version>${json4s.spark-2.12.version}</json4s.version>
++  </properties>
++
++</project>
+diff --git a/repl/scala-2.12/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala b/repl/scala-2.12/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala
+new file mode 100644
+index 0000000..bb8f7e5
+--- /dev/null
++++ b/repl/scala-2.12/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala
+@@ -0,0 +1,132 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *    http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.livy.repl
++
++import java.io.File
++import java.net.URLClassLoader
++import java.nio.file.{Files, Paths}
++
++import scala.tools.nsc.Settings
++import scala.tools.nsc.interpreter.Completion
++import scala.tools.nsc.interpreter.IMain
++import scala.tools.nsc.interpreter.JPrintWriter
++import scala.tools.nsc.interpreter.NoCompletion
++import scala.tools.nsc.interpreter.Results.Result
++
++import org.apache.spark.SparkConf
++import org.apache.spark.repl.SparkILoop
++
++/**
++ * This represents a Spark interpreter. It is not thread safe.
++ */
++class SparkInterpreter(protected override val conf: SparkConf) extends AbstractSparkInterpreter {
++
++  private var sparkILoop: SparkILoop = _
++
++  override def start(): Unit = {
++    require(sparkILoop == null)
++
++    val rootDir = conf.get("spark.repl.classdir", System.getProperty("java.io.tmpdir"))
++    val outputDir = Files.createTempDirectory(Paths.get(rootDir), "spark").toFile
++    outputDir.deleteOnExit()
++    conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath)
++
++    val settings = new Settings()
++    settings.processArguments(List("-Yrepl-class-based",
++      "-Yrepl-outdir", s"${outputDir.getAbsolutePath}"), true)
++    settings.usejavacp.value = true
++    settings.embeddedDefaults(Thread.currentThread().getContextClassLoader())
++
++    sparkILoop = new SparkILoop(None, new JPrintWriter(outputStream, true))
++    sparkILoop.settings = settings
++    sparkILoop.createInterpreter()
++    sparkILoop.initializeSynchronous()
++
++    restoreContextClassLoader {
++      sparkILoop.compilerClasspath
++      sparkILoop.ensureClassLoader
++      var classLoader = Thread.currentThread().getContextClassLoader
++      while (classLoader != null) {
++        if (classLoader.getClass.getCanonicalName ==
++          "org.apache.spark.util.MutableURLClassLoader") {
++          val extraJarPath = classLoader.asInstanceOf[URLClassLoader].getURLs()
++            // Check if the file exists. Otherwise an exception will be thrown.
++            .filter { u => u.getProtocol == "file" && new File(u.getPath).isFile }
++            // Livy rsc and repl are also in the extra jars list. Filter them out.
++            .filterNot { u => Paths.get(u.toURI).getFileName.toString.startsWith("livy-") }
++            // Some bad spark packages depend on the wrong version of scala-reflect. Blacklist it.
++            .filterNot { u =>
++              Paths.get(u.toURI).getFileName.toString.contains("org.scala-lang_scala-reflect")
++            }
++
++          extraJarPath.foreach { p => debug(s"Adding $p to Scala interpreter's class path...") }
++          sparkILoop.addUrlsToClassPath(extraJarPath: _*)
++          classLoader = null
++        } else {
++          classLoader = classLoader.getParent
++        }
++      }
++
++      postStart()
++    }
 +  }
 +
-   override def createInterpreter(): Interpreter = {
-     val sparkConf = new SparkConf()
-     PythonInterpreter(sparkConf, new SparkEntries(sparkConf))
-diff --git a/repl/src/test/scala/org/apache/livy/repl/PythonSessionSpec.scala b/repl/src/test/scala/org/apache/livy/repl/PythonSessionSpec.scala
-index b54be11..4e66545 100644
---- a/repl/src/test/scala/org/apache/livy/repl/PythonSessionSpec.scala
-+++ b/repl/src/test/scala/org/apache/livy/repl/PythonSessionSpec.scala
-@@ -170,7 +170,13 @@ abstract class PythonSessionSpec extends BaseSessionSpec(PySpark) {
-   }
- }
- 
--class Python2SessionSpec extends PythonSessionSpec
-+class Python2SessionSpec extends PythonSessionSpec {
++  override def close(): Unit = synchronized {
++    super.close()
 +
-+  override protected def withFixture(test: NoArgTest): Outcome = {
-+    assume(!sys.props.getOrElse("skipPython2Tests", "false").toBoolean, "Skipping Python 2 tests.")
-+    test()
++    if (sparkILoop != null) {
++      sparkILoop.closeInterpreter()
++      sparkILoop = null
++    }
 +  }
-+}
- 
- class Python3SessionSpec extends PythonSessionSpec with BeforeAndAfterAll {
- 
-diff --git a/repl/src/test/scala/org/apache/livy/repl/SQLInterpreterSpec.scala b/repl/src/test/scala/org/apache/livy/repl/SQLInterpreterSpec.scala
-index 781ed72..3d9d4ac 100644
---- a/repl/src/test/scala/org/apache/livy/repl/SQLInterpreterSpec.scala
-+++ b/repl/src/test/scala/org/apache/livy/repl/SQLInterpreterSpec.scala
-@@ -113,7 +113,7 @@ class SQLInterpreterSpec extends BaseInterpreterSpec {
-   it should "execute sql queries" in withInterpreter { interpreter =>
++
++  override protected def isStarted(): Boolean = {
++    sparkILoop != null
++  }
++
++  override protected def interpret(code: String): Result = {
++    sparkILoop.interpret(code)
++  }
++
++  override protected def completeCandidates(code: String, cursor: Int) : Array[String] = {
++    val completer : Completion = {
++      try {
++        val cls = Class.forName("scala.tools.nsc.interpreter.PresentationCompilerCompleter")
++        cls.getDeclaredConstructor(classOf[IMain]).newInstance(sparkILoop.intp)
++          .asInstanceOf[Completion]
++      } catch {
++        case e : ClassNotFoundException => NoCompletion
++      }
++    }
++    completer.complete(code, cursor).candidates.toArray
++  }
++
++  override protected def valueOfTerm(name: String): Option[Any] = {
++    // IMain#valueOfTerm will always return None, so use another way instead.
++    Option(sparkILoop.lastRequest.lineRep.call("$result"))
++  }
++
++  override protected def bind(name: String,
++      tpe: String,
++      value: Object,
++      modifier: List[String]): Unit = {
++    sparkILoop.beQuietDuring {
++      sparkILoop.bind(name, tpe, value, modifier)
++    }
++  }
++}
+diff --git a/repl/scala-2.12/src/test/scala/org/apache/livy/repl/SparkInterpreterSpec.scala b/repl/scala-2.12/src/test/scala/org/apache/livy/repl/SparkInterpreterSpec.scala
+new file mode 100644
+index 0000000..d922034
+--- /dev/null
++++ b/repl/scala-2.12/src/test/scala/org/apache/livy/repl/SparkInterpreterSpec.scala
+@@ -0,0 +1,68 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *    http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.livy.repl
++
++import org.scalatest._
++
++import org.apache.livy.LivyBaseUnitTestSuite
++
++class SparkInterpreterSpec extends FunSpec with Matchers with LivyBaseUnitTestSuite {
++  describe("SparkInterpreter") {
++    val interpreter = new SparkInterpreter(null)
++
++    it("should parse Scala compile error.") {
++      // Regression test for LIVY-.
++      val error =
++        """<console>:27: error: type mismatch;
++          | found   : Int
++          | required: String
++          |       sc.setJobGroup(groupName, groupName, true)
++          |                      ^
++          |<console>:27: error: type mismatch;
++          | found   : Int
++          | required: String
++          |       sc.setJobGroup(groupName, groupName, true)
++          |                                 ^
++          |""".stripMargin
++
++      val parsedError = AbstractSparkInterpreter.KEEP_NEWLINE_REGEX.split(error)
++
++      val expectedTraceback = parsedError.tail
++
++      val (ename, traceback) = interpreter.parseError(error)
++      ename shouldBe "<console>:27: error: type mismatch;"
++      traceback shouldBe expectedTraceback
++    }
++
++    it("should parse Scala runtime error.") {
++      val error =
++        """java.lang.RuntimeException: message
++          |    ... 48 elided
++          |
++          |Tailing message""".stripMargin
++
++      val parsedError = AbstractSparkInterpreter.KEEP_NEWLINE_REGEX.split(error)
++
++      val expectedTraceback = parsedError.tail
++
++      val (ename, traceback) = interpreter.parseError(error)
++      ename shouldBe "java.lang.RuntimeException: message"
++      traceback shouldBe expectedTraceback
++    }
++  }
++}
+diff --git a/repl/src/main/scala/org/apache/livy/repl/Session.scala b/repl/src/main/scala/org/apache/livy/repl/Session.scala
+index ea8a761..262c811 100644
+--- a/repl/src/main/scala/org/apache/livy/repl/Session.scala
++++ b/repl/src/main/scala/org/apache/livy/repl/Session.scala
+@@ -348,8 +348,10 @@ class Session(
+           case "1" =>
+             (s"""setJobGroup(sc, "$jobGroup", "Job group for statement $jobGroup", FALSE)""",
+              SparkR)
+-          case "2" =>
++          case "2" | "3" =>
+             (s"""setJobGroup("$jobGroup", "Job group for statement $jobGroup", FALSE)""", SparkR)
++          case v =>
++            throw new IllegalArgumentException(s"Unknown Spark major version [$v]")
+         }
+     }
+     // Set the job group
+diff --git a/repl/src/test/scala/org/apache/livy/repl/SQLInterpreterSpec.scala b/repl/src/test/scala/org/apache/livy/repl/SQLInterpreterSpec.scala
+index 781ed72..3d9d4ac 100644
+--- a/repl/src/test/scala/org/apache/livy/repl/SQLInterpreterSpec.scala
++++ b/repl/src/test/scala/org/apache/livy/repl/SQLInterpreterSpec.scala
+@@ -113,7 +113,7 @@ class SQLInterpreterSpec extends BaseInterpreterSpec {
+   it should "execute sql queries" in withInterpreter { interpreter =>
      val rdd = sparkEntries.sc().parallelize(Seq(People("Jerry", 20), People("Michael", 21)))
      val df = sparkEntries.sqlctx().createDataFrame(rdd)
 -    df.registerTempTable("people")
@@ -842,22 +1005,10 @@ index 781ed72..3d9d4ac 100644
      val resp1 = interpreter.execute(
        """
 diff --git a/rsc/pom.xml b/rsc/pom.xml
-index c783a31..c9fe003 100644
+index 057a592..97462b2 100644
 --- a/rsc/pom.xml
 +++ b/rsc/pom.xml
-@@ -49,6 +49,11 @@
-       <version>${project.version}</version>
-       <scope>test</scope>
-     </dependency>
-+    <dependency>
-+      <groupId>org.slf4j</groupId>
-+      <artifactId>slf4j-api</artifactId>
-+      <scope>provided</scope>
-+    </dependency>
-     <dependency>
-       <groupId>org.apache.livy</groupId>
-       <artifactId>livy-core_${scala.binary.version}</artifactId>
-@@ -58,7 +63,7 @@
+@@ -58,7 +58,7 @@
      </dependency>
  
      <dependency>
@@ -866,18 +1017,6 @@ index c783a31..c9fe003 100644
        <artifactId>kryo</artifactId>
      </dependency>
      <dependency>
-@@ -116,11 +121,6 @@
-       <artifactId>hadoop-common</artifactId>
-       <scope>provided</scope>
-     </dependency>
--    <dependency>
--      <groupId>org.slf4j</groupId>
--      <artifactId>slf4j-api</artifactId>
--      <scope>provided</scope>
--    </dependency>
-   </dependencies>
- 
-   <build>
 @@ -140,7 +140,9 @@
                <artifactSet>
                  <includes>
@@ -889,38 +1028,8 @@ index c783a31..c9fe003 100644
                  </includes>
                </artifactSet>
                <filters>
-diff --git a/rsc/src/main/java/org/apache/livy/rsc/ContextLauncher.java b/rsc/src/main/java/org/apache/livy/rsc/ContextLauncher.java
-index c59136d..293ff5a 100644
---- a/rsc/src/main/java/org/apache/livy/rsc/ContextLauncher.java
-+++ b/rsc/src/main/java/org/apache/livy/rsc/ContextLauncher.java
-@@ -243,7 +243,9 @@ class ContextLauncher {
-       launcher.setAppResource(SparkLauncher.NO_RESOURCE);
-       launcher.setPropertiesFile(confFile.getAbsolutePath());
-       launcher.setMainClass(RSCDriverBootstrapper.class.getName());
--
-+      if (conf.get(MASTER) != null) {
-+        launcher.setMaster(conf.get(MASTER));
-+      }
-       if (conf.get(PROXY_USER) != null) {
-         launcher.addSparkArg("--proxy-user", conf.get(PROXY_USER));
-       }
-diff --git a/rsc/src/main/java/org/apache/livy/rsc/RSCConf.java b/rsc/src/main/java/org/apache/livy/rsc/RSCConf.java
-index 4c45956..d8bcd85 100644
---- a/rsc/src/main/java/org/apache/livy/rsc/RSCConf.java
-+++ b/rsc/src/main/java/org/apache/livy/rsc/RSCConf.java
-@@ -82,7 +82,9 @@ public class RSCConf extends ClientConf<RSCConf> {
-     RETAINED_SHARE_VARIABLES("retained.share-variables", 100),
- 
-     // Number of result rows to get for SQL Interpreters.
--    SQL_NUM_ROWS("sql.num-rows", 1000);
-+    SQL_NUM_ROWS("sql.num-rows", 1000),
-+
-+    MASTER("spark.master", null);
- 
-     private final String key;
-     private final Object dflt;
 diff --git a/rsc/src/main/java/org/apache/livy/rsc/driver/SparkEntries.java b/rsc/src/main/java/org/apache/livy/rsc/driver/SparkEntries.java
-index c64fc72..77e5a18 100644
+index c64fc72..6726bb1 100644
 --- a/rsc/src/main/java/org/apache/livy/rsc/driver/SparkEntries.java
 +++ b/rsc/src/main/java/org/apache/livy/rsc/driver/SparkEntries.java
 @@ -17,7 +17,6 @@
@@ -958,15 +1067,6 @@ index c64fc72..77e5a18 100644
            if (conf.getBoolean("spark.repl.enableHiveContext", false) ||
              conf.get("spark.sql.catalogImplementation", "in-memory").toLowerCase()
                .equals("hive")) {
-@@ -123,7 +122,7 @@ public class SparkEntries {
-                "classpath.");
-             }
-             hivectx = new HiveContext(sc().sc());
--            LOG.info("Created HiveContext.");
-+            LOG.info("Created hive HiveContext.");
-           }
-         }
-       }
 diff --git a/rsc/src/main/java/org/apache/livy/rsc/rpc/KryoMessageCodec.java b/rsc/src/main/java/org/apache/livy/rsc/rpc/KryoMessageCodec.java
 index b860e65..191ea50 100644
 --- a/rsc/src/main/java/org/apache/livy/rsc/rpc/KryoMessageCodec.java
@@ -989,44 +1089,74 @@ index b860e65..191ea50 100644
  import io.netty.buffer.ByteBuf;
  import io.netty.channel.ChannelHandlerContext;
  import io.netty.handler.codec.ByteToMessageCodec;
+diff --git a/scala-api/scala-2.12/pom.xml b/scala-api/scala-2.12/pom.xml
+new file mode 100644
+index 0000000..c5d8d7f
+--- /dev/null
++++ b/scala-api/scala-2.12/pom.xml
+@@ -0,0 +1,38 @@
++<?xml version="1.0" encoding="UTF-8"?>
++<!--
++  ~ Licensed to the Apache Software Foundation (ASF) under one or more
++  ~ contributor license agreements.  See the NOTICE file distributed with
++  ~ this work for additional information regarding copyright ownership.
++  ~ The ASF licenses this file to You under the Apache License, Version 2.0
++  ~ (the "License"); you may not use this file except in compliance with
++  ~ the License.  You may obtain a copy of the License at
++  ~
++  ~    http://www.apache.org/licenses/LICENSE-2.0
++  ~
++  ~ Unless required by applicable law or agreed to in writing, software
++  ~ distributed under the License is distributed on an "AS IS" BASIS,
++  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++  ~ See the License for the specific language governing permissions and
++  ~ limitations under the License.
++-->
++<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
++  <modelVersion>4.0.0</modelVersion>
++  <groupId>org.apache.livy</groupId>
++  <artifactId>livy-scala-api_2.12</artifactId>
++  <version>0.7.1-incubating</version>
++  <packaging>jar</packaging>
++
++  <parent>
++    <groupId>org.apache.livy</groupId>
++    <artifactId>livy-scala-api-parent</artifactId>
++    <version>0.7.1-incubating</version>
++    <relativePath>../pom.xml</relativePath>
++  </parent>
++
++  <properties>
++    <scala.version>${scala-2.12.version}</scala.version>
++    <scala.binary.version>2.12</scala.binary.version>
++    <spark.version>${spark.scala-2.12.version}</spark.version>
++    <netty.version>${netty.spark-2.12.version}</netty.version>
++  </properties>
++</project>
+diff --git a/scala-api/src/main/resources/build.marker b/scala-api/src/main/resources/build.marker
+new file mode 100644
+index 0000000..e69de29
 diff --git a/scala-api/src/main/scala/org/apache/livy/scalaapi/ScalaJobHandle.scala b/scala-api/src/main/scala/org/apache/livy/scalaapi/ScalaJobHandle.scala
-index d1cf29d..7c6d58d 100644
+index d1cf29d..a04cbfa 100644
 --- a/scala-api/src/main/scala/org/apache/livy/scalaapi/ScalaJobHandle.scala
 +++ b/scala-api/src/main/scala/org/apache/livy/scalaapi/ScalaJobHandle.scala
-@@ -190,6 +190,19 @@ class ScalaJobHandle[T] private[livy] (jobHandle: JobHandle[T]) extends Future[T
+@@ -190,6 +190,14 @@ class ScalaJobHandle[T] private[livy] (jobHandle: JobHandle[T]) extends Future[T
      getJavaFutureResult(jobHandle, atMost)
      this
    }
 +
-+  // These two methods must be implemented in Scala 2.12. They're implemented as a no-op here
-+  // and then filled in with a real implementation in the two subclasses below. The no-op exists
-+  // here so that those implementations can declare "override", necessary in 2.12, while working
-+  // in 2.11, where the method doesn't exist in the superclass.
-+  // After 2.11 support goes away, remove these two:
-+
++  // These two methods must be implemented in Scala 2.12. They are implemented as unsupported
++  // operations here.
 +  def transform[S](f: (Try[T]) => Try[S])(implicit executor: ExecutionContext): Future[S] =
 +    throw new UnsupportedOperationException()
 +
 +  def transformWith[S](f: (Try[T]) => Future[S])(implicit executor: ExecutionContext): Future[S] =
 +    throw new UnsupportedOperationException()
-+
  }
  
  private abstract class AbstractScalaJobHandleListener[T] extends Listener[T] {
-diff --git a/scala-api/src/test/scala/org/apache/livy/scalaapi/ScalaClientTestUtils.scala b/scala-api/src/test/scala/org/apache/livy/scalaapi/ScalaClientTestUtils.scala
-index 458ff3b..a5232db 100644
---- a/scala-api/src/test/scala/org/apache/livy/scalaapi/ScalaClientTestUtils.scala
-+++ b/scala-api/src/test/scala/org/apache/livy/scalaapi/ScalaClientTestUtils.scala
-@@ -22,6 +22,7 @@ import java.util.concurrent.{CountDownLatch, TimeUnit}
- import scala.collection.mutable.ArrayBuffer
- import scala.concurrent.{Await, Future}
- import scala.concurrent.duration._
-+import scala.language.postfixOps
- 
- import org.scalatest.FunSuite
- 
 diff --git a/server/pom.xml b/server/pom.xml
-index d8d8e83..babf86a 100644
+index 773d51f..4fe452f 100644
 --- a/server/pom.xml
 +++ b/server/pom.xml
 @@ -200,7 +200,7 @@
@@ -1051,25 +1181,11 @@ index d8d8e83..babf86a 100644
      </dependency>
  
      <dependency>
-@@ -367,5 +373,17 @@
-     </plugins>
+@@ -368,4 +374,3 @@
    </build>
  
--</project>
-+  <profiles>
-+    <profile>
-+      <id>spark-3.0</id>
-+      <dependencies>
-+        <dependency>
-+          <groupId>org.json4s</groupId>
-+          <artifactId>json4s-scalap_2.12</artifactId>
-+          <version>3.6.6</version>
-+        </dependency>
-+      </dependencies>
-+    </profile>
-+  </profiles>
- 
-+</project>
+ </project>
+-
 diff --git a/server/src/main/scala/org/apache/livy/server/SessionServlet.scala b/server/src/main/scala/org/apache/livy/server/SessionServlet.scala
 index 1fc27a5..a726e7d 100644
 --- a/server/src/main/scala/org/apache/livy/server/SessionServlet.scala
@@ -1084,40 +1200,25 @@ index 1fc27a5..a726e7d 100644
    /**
     * Creates a new session based on the current request. The implementation is responsible for
 diff --git a/server/src/main/scala/org/apache/livy/server/interactive/InteractiveSession.scala b/server/src/main/scala/org/apache/livy/server/interactive/InteractiveSession.scala
-index cdeddda..8d579b9 100644
+index cdeddda..9f39372 100644
 --- a/server/src/main/scala/org/apache/livy/server/interactive/InteractiveSession.scala
 +++ b/server/src/main/scala/org/apache/livy/server/interactive/InteractiveSession.scala
-@@ -208,7 +208,13 @@ object InteractiveSession extends Logging {
+@@ -204,11 +204,13 @@ object InteractiveSession extends Logging {
+       } else {
+         val sparkHome = livyConf.sparkHome().get
+         val libdir = sparkMajorVersion match {
+-          case 2 =>
++          case 2 | 3 =>
              if (new File(sparkHome, "RELEASE").isFile) {
                new File(sparkHome, "jars")
-             } else {
--              new File(sparkHome, "assembly/target/scala-2.11/jars")
-+              new File(sparkHome, "assembly/target/scala-*/jars")
-+            }
-+          case 3 =>
-+            if (new File(sparkHome, "RELEASE").isFile) {
-+              new File(sparkHome, "jars")
+-            } else {
++            } else if (new File(sparkHome, "assembly/target/scala-2.11/jars").isDirectory) {
+               new File(sparkHome, "assembly/target/scala-2.11/jars")
 +            } else {
 +              new File(sparkHome, "assembly/target/scala-2.12/jars")
              }
            case v =>
              throw new RuntimeException(s"Unsupported Spark major version: $sparkMajorVersion")
-@@ -410,13 +416,13 @@ class InteractiveSession(
-     } else {
-       val uriFuture = Future { client.get.getServerUri.get() }
- 
--      uriFuture.onSuccess { case url =>
-+      uriFuture.foreach { case url =>
-         rscDriverUri = Option(url)
-         sessionSaveLock.synchronized {
-           sessionStore.save(RECOVERY_SESSION_TYPE, recoveryMetadata)
-         }
-       }
--      uriFuture.onFailure { case e => warn("Fail to get rsc uri", e) }
-+      uriFuture.failed.foreach { case e => warn("Fail to get rsc uri", e) }
- 
-       // Send a dummy job that will return once the client is ready to be used, and set the
-       // state to "idle" at that point.
 diff --git a/server/src/main/scala/org/apache/livy/utils/LivySparkUtils.scala b/server/src/main/scala/org/apache/livy/utils/LivySparkUtils.scala
 index dc98b0d..c94b199 100644
 --- a/server/src/main/scala/org/apache/livy/utils/LivySparkUtils.scala
@@ -1226,7 +1327,7 @@ index c97aa19..78407d5 100644
  import org.apache.livy.{ExecuteRequest, LivyConf}
  import org.apache.livy.client.common.HttpMessages.SessionInfo
 diff --git a/server/src/test/scala/org/apache/livy/server/interactive/InteractiveSessionSpec.scala b/server/src/test/scala/org/apache/livy/server/interactive/InteractiveSessionSpec.scala
-index 55f0e21..c3b02e1 100644
+index 55f0e21..d13e682 100644
 --- a/server/src/test/scala/org/apache/livy/server/interactive/InteractiveSessionSpec.scala
 +++ b/server/src/test/scala/org/apache/livy/server/interactive/InteractiveSessionSpec.scala
 @@ -31,7 +31,7 @@ import org.mockito.Matchers._
@@ -1238,64 +1339,6 @@ index 55f0e21..c3b02e1 100644
  
  import org.apache.livy.{ExecuteRequest, JobHandle, LivyBaseUnitTestSuite, LivyConf}
  import org.apache.livy.rsc.{PingJob, RSCClient, RSCConf}
-@@ -189,34 +189,27 @@ class InteractiveSessionSpec extends FunSpec
-       assert(properties(RSCConf.Entry.RPC_CHANNEL_LOG_LEVEL.key()) === "TRACE")
-     }
- 
--    withSession("should execute `1 + 2` == 3") { session =>
--      val pyResult = executeStatement("1 + 2", Some("pyspark"))
--      pyResult should equal (Extraction.decompose(Map(
-+    withSession("should execute `1 + 2` == 3 (pyspark)") { session =>
-+      executeStatement("1 + 2", Some("pyspark")) should equal (Extraction.decompose(Map(
-         "status" -> "ok",
-         "execution_count" -> 0,
-         "data" -> Map("text/plain" -> "3")))
-       )
-+    }
- 
--      val scalaResult = executeStatement("1 + 2", Some("spark"))
--      scalaResult should equal (Extraction.decompose(Map(
-+    withSession("should execute `1 + 2` == 3 (spark)") { session =>
-+      executeStatement("1 + 2", Some("spark")) should equal (Extraction.decompose(Map(
-         "status" -> "ok",
-         "execution_count" -> 1,
-         "data" -> Map("text/plain" -> "res0: Int = 3\n")))
-       )
--
--      val rResult = executeStatement("1 + 2", Some("sparkr"))
--      rResult should equal (Extraction.decompose(Map(
--        "status" -> "ok",
--        "execution_count" -> 2,
--        "data" -> Map("text/plain" -> "[1] 3")))
--      )
-     }
- 
-     withSession("should report an error if accessing an unknown variable") { session =>
-       val result = executeStatement("x")
-       val expectedResult = Extraction.decompose(Map(
-         "status" -> "error",
--        "execution_count" -> 3,
-+        "execution_count" -> 2,
-         "ename" -> "NameError",
-         "evalue" -> "name 'x' is not defined",
-         "traceback" -> List(
-@@ -231,6 +224,16 @@ class InteractiveSessionSpec extends FunSpec
-       }
-     }
- 
-+    withSession("should execute `1 + 2` == 3 (sparkr)") { session => {
-+        assume(!sys.props.getOrElse("skipRTests", "false").toBoolean, "Skipping R tests.")
-+        executeStatement("1 + 2", Some("sparkr")) should equal (Extraction.decompose(Map(
-+          "status" -> "ok",
-+          "execution_count" -> 3,
-+          "data" -> Map("text/plain" -> "[1] 3")))
-+        )
-+      }
-+    }
-+
-     withSession("should get statement progress along with statement result") { session =>
-       val code =
-         """
 diff --git a/server/src/test/scala/org/apache/livy/server/interactive/JobApiSpec.scala b/server/src/test/scala/org/apache/livy/server/interactive/JobApiSpec.scala
 index 1646492..8ec0b19 100644
 --- a/server/src/test/scala/org/apache/livy/server/interactive/JobApiSpec.scala
@@ -1487,29 +1530,29 @@ index 19abb0d..780a318 100644
    }
  
 diff --git a/thriftserver/session/pom.xml b/thriftserver/session/pom.xml
-index eb44dcf..0263c6e 100644
+index d56f7c2..d0018ae 100644
 --- a/thriftserver/session/pom.xml
 +++ b/thriftserver/session/pom.xml
-@@ -106,5 +106,17 @@
-         <json4s.version>3.5.3</json4s.version>
-       </properties>
-     </profile>
-+    <profile>
-+      <id>spark-2.4-2.12</id>
-+      <properties>
-+        <json4s.version>3.5.3</json4s.version>
-+      </properties>
-+    </profile>
-+    <profile>
-+      <id>spark-3.0</id>
-+      <properties>
-+        <json4s.version>3.6.6</json4s.version>
-+      </properties>
-+    </profile>
-   </profiles>
+@@ -94,17 +94,4 @@
+     </plugins>
+   </build>
+ 
+-  <profiles>
+-    <!--
+-      Override the json4s version to match Spark 2.4's. This module doesn't use json4s, but the
+-      Spark APIs called in the tests require a different version of json4s than Livy's (and Livy
+-      doesn't really work with Spark's version yet).
+-    -->
+-    <profile>
+-      <id>spark-2.4</id>
+-      <properties>
+-        <json4s.version>3.5.3</json4s.version>
+-      </properties>
+-    </profile>
+-  </profiles>
  </project>
 diff --git a/thriftserver/session/src/test/java/org/apache/livy/thriftserver/session/ColumnBufferTest.java b/thriftserver/session/src/test/java/org/apache/livy/thriftserver/session/ColumnBufferTest.java
-index 71ed020..e9d679c 100644
+index 71ed020..b8832fb 100644
 --- a/thriftserver/session/src/test/java/org/apache/livy/thriftserver/session/ColumnBufferTest.java
 +++ b/thriftserver/session/src/test/java/org/apache/livy/thriftserver/session/ColumnBufferTest.java
 @@ -27,7 +27,6 @@ import java.util.List;
@@ -1538,10 +1581,10 @@ index 71ed020..e9d679c 100644
 -      .set("spark.sql.warehouse.dir", warehouse);
 -    SparkContext sc = new SparkContext(conf);
 +    SparkSession session = SparkSession.builder()
-+            .master("local")
-+            .appName(getClass().getName())
-+            .config("spark.sql.warehouse.dir", warehouse)
-+            .getOrCreate();
++      .master("local")
++      .appName(getClass().getName())
++      .config("spark.sql.warehouse.dir", warehouse)
++      .getOrCreate();
  
      try {
 -      SQLContext spark = SQLContext.getOrCreate(sc);
@@ -1558,368 +1601,3 @@ index 71ed020..e9d679c 100644
      }
    }
  
-diff --git a/core/scala-2.12/pom.xml b/core/scala-2.12/pom.xml
-new file mode 100644
-index 0000000..9f27512
---- /dev/null
-+++ b/core/scala-2.12/pom.xml
-@@ -0,0 +1,53 @@
-+<?xml version="1.0" encoding="UTF-8"?>
-+<!--
-+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-+  ~ contributor license agreements.  See the NOTICE file distributed with
-+  ~ this work for additional information regarding copyright ownership.
-+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-+  ~ (the "License"); you may not use this file except in compliance with
-+  ~ the License.  You may obtain a copy of the License at
-+  ~
-+  ~    http://www.apache.org/licenses/LICENSE-2.0
-+  ~
-+  ~ Unless required by applicable law or agreed to in writing, software
-+  ~ distributed under the License is distributed on an "AS IS" BASIS,
-+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-+  ~ See the License for the specific language governing permissions and
-+  ~ limitations under the License.
-+-->
-+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-+  <modelVersion>4.0.0</modelVersion>
-+  <groupId>org.apache.livy</groupId>
-+  <artifactId>livy-core_2.12</artifactId>
-+  <version>0.7.1-incubating</version>
-+  <packaging>jar</packaging>
-+
-+  <parent>
-+    <groupId>org.apache.livy</groupId>
-+    <artifactId>livy-core-parent</artifactId>
-+    <version>0.7.1-incubating</version>
-+    <relativePath>../pom.xml</relativePath>
-+  </parent>
-+
-+  <properties>
-+    <scala.version>${scala-2.12.version}</scala.version>
-+    <scala.binary.version>2.12</scala.binary.version>
-+  </properties>
-+
-+  <build>
-+    <plugins>
-+      <plugin>
-+        <groupId>org.apache.maven.plugins</groupId>
-+        <artifactId>maven-jar-plugin</artifactId>
-+        <executions>
-+          <execution>
-+            <goals>
-+              <goal>test-jar</goal>
-+            </goals>
-+          </execution>
-+        </executions>
-+      </plugin>
-+    </plugins>
-+  </build>
-+
-+</project>
-diff --git a/repl/scala-2.12/pom.xml b/repl/scala-2.12/pom.xml
-new file mode 100644
-index 0000000..4ddb528
---- /dev/null
-+++ b/repl/scala-2.12/pom.xml
-@@ -0,0 +1,41 @@
-+<?xml version="1.0" encoding="UTF-8"?>
-+<!--
-+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-+  ~ contributor license agreements.  See the NOTICE file distributed with
-+  ~ this work for additional information regarding copyright ownership.
-+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-+  ~ (the "License"); you may not use this file except in compliance with
-+  ~ the License.  You may obtain a copy of the License at
-+  ~
-+  ~    http://www.apache.org/licenses/LICENSE-2.0
-+  ~
-+  ~ Unless required by applicable law or agreed to in writing, software
-+  ~ distributed under the License is distributed on an "AS IS" BASIS,
-+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-+  ~ See the License for the specific language governing permissions and
-+  ~ limitations under the License.
-+-->
-+<project xmlns="http://maven.apache.org/POM/4.0.0"
-+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-+  <modelVersion>4.0.0</modelVersion>
-+  <groupId>org.apache.livy</groupId>
-+  <artifactId>livy-repl_2.12</artifactId>
-+  <version>0.7.1-incubating</version>
-+  <packaging>jar</packaging>
-+
-+  <parent>
-+    <groupId>org.apache.livy</groupId>
-+    <artifactId>livy-repl-parent</artifactId>
-+    <version>0.7.1-incubating</version>
-+    <relativePath>../pom.xml</relativePath>
-+  </parent>
-+
-+  <properties>
-+    <scala.version>${scala-2.12.version}</scala.version>
-+    <scala.binary.version>2.12</scala.binary.version>
-+    <spark.version>${spark.scala-2.12.version}</spark.version>
-+    <netty.version>${netty.spark-2.12.version}</netty.version>
-+  </properties>
-+
-+</project>
-diff --git a/repl/scala-2.12/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala b/repl/scala-2.12/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala
-new file mode 100644
-index 0000000..40c1c6b
---- /dev/null
-+++ b/repl/scala-2.12/src/main/scala/org/apache/livy/repl/SparkInterpreter.scala
-@@ -0,0 +1,135 @@
-+/*
-+ * Licensed to the Apache Software Foundation (ASF) under one or more
-+ * contributor license agreements.  See the NOTICE file distributed with
-+ * this work for additional information regarding copyright ownership.
-+ * The ASF licenses this file to You under the Apache License, Version 2.0
-+ * (the "License"); you may not use this file except in compliance with
-+ * the License.  You may obtain a copy of the License at
-+ *
-+ *    http://www.apache.org/licenses/LICENSE-2.0
-+ *
-+ * Unless required by applicable law or agreed to in writing, software
-+ * distributed under the License is distributed on an "AS IS" BASIS,
-+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-+ * See the License for the specific language governing permissions and
-+ * limitations under the License.
-+ */
-+
-+package org.apache.livy.repl
-+
-+import java.io.File
-+import java.net.URLClassLoader
-+import java.nio.file.{Files, Paths}
-+
-+import scala.tools.nsc.Settings
-+import scala.tools.nsc.interpreter.Completion
-+import scala.tools.nsc.interpreter.IMain
-+import scala.tools.nsc.interpreter.JPrintWriter
-+import scala.tools.nsc.interpreter.NoCompletion
-+import scala.tools.nsc.interpreter.Results.Result
-+import scala.util.control.NonFatal
-+
-+import org.apache.spark.SparkConf
-+import org.apache.spark.repl.SparkILoop
-+
-+import org.apache.livy.rsc.driver.SparkEntries
-+
-+/**
-+ * This represents a Spark interpreter. It is not thread safe.
-+ */
-+class SparkInterpreter(protected override val conf: SparkConf) extends AbstractSparkInterpreter {
-+
-+  private var sparkILoop: SparkILoop = _
-+
-+  override def start(): Unit = {
-+    require(sparkILoop == null)
-+
-+    val rootDir = conf.get("spark.repl.classdir", System.getProperty("java.io.tmpdir"))
-+    val outputDir = Files.createTempDirectory(Paths.get(rootDir), "spark").toFile
-+    outputDir.deleteOnExit()
-+    conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath)
-+
-+    val settings = new Settings()
-+    settings.processArguments(List("-Yrepl-class-based",
-+      "-Yrepl-outdir", s"${outputDir.getAbsolutePath}"), true)
-+    settings.usejavacp.value = true
-+    settings.embeddedDefaults(Thread.currentThread().getContextClassLoader())
-+
-+    sparkILoop = new SparkILoop(None, new JPrintWriter(outputStream, true))
-+    sparkILoop.settings = settings
-+    sparkILoop.createInterpreter()
-+    sparkILoop.initializeSynchronous()
-+
-+    restoreContextClassLoader {
-+      sparkILoop.compilerClasspath
-+      sparkILoop.ensureClassLoader
-+      var classLoader = Thread.currentThread().getContextClassLoader
-+      while (classLoader != null) {
-+        if (classLoader.getClass.getCanonicalName ==
-+          "org.apache.spark.util.MutableURLClassLoader") {
-+          val extraJarPath = classLoader.asInstanceOf[URLClassLoader].getURLs()
-+            // Check if the file exists. Otherwise an exception will be thrown.
-+            .filter { u => u.getProtocol == "file" && new File(u.getPath).isFile }
-+            // Livy rsc and repl are also in the extra jars list. Filter them out.
-+            .filterNot { u => Paths.get(u.toURI).getFileName.toString.startsWith("livy-") }
-+            // Some bad spark packages depend on the wrong version of scala-reflect. Blacklist it.
-+            .filterNot { u =>
-+              Paths.get(u.toURI).getFileName.toString.contains("org.scala-lang_scala-reflect")
-+            }
-+
-+          extraJarPath.foreach { p => debug(s"Adding $p to Scala interpreter's class path...") }
-+          sparkILoop.addUrlsToClassPath(extraJarPath: _*)
-+          classLoader = null
-+        } else {
-+          classLoader = classLoader.getParent
-+        }
-+      }
-+
-+      postStart()
-+    }
-+  }
-+
-+  override def close(): Unit = synchronized {
-+    super.close()
-+
-+    if (sparkILoop != null) {
-+      sparkILoop.closeInterpreter()
-+      sparkILoop = null
-+    }
-+  }
-+
-+  override protected def isStarted(): Boolean = {
-+    sparkILoop != null
-+  }
-+
-+  override protected def interpret(code: String): Result = {
-+    sparkILoop.interpret(code)
-+  }
-+
-+  override protected def completeCandidates(code: String, cursor: Int) : Array[String] = {
-+    val completer : Completion = {
-+      try {
-+        val cls = Class.forName("scala.tools.nsc.interpreter.PresentationCompilerCompleter")
-+        cls.getDeclaredConstructor(classOf[IMain]).newInstance(sparkILoop.intp)
-+          .asInstanceOf[Completion]
-+      } catch {
-+        case e : ClassNotFoundException => NoCompletion
-+      }
-+    }
-+    completer.complete(code, cursor).candidates.toArray
-+  }
-+
-+  override protected def valueOfTerm(name: String): Option[Any] = {
-+    // IMain#valueOfTerm will always return None, so use other way instead.
-+    Option(sparkILoop.lastRequest.lineRep.call("$result"))
-+  }
-+
-+  override protected def bind(name: String,
-+      tpe: String,
-+      value: Object,
-+      modifier: List[String]): Unit = {
-+    sparkILoop.beQuietDuring {
-+      sparkILoop.bind(name, tpe, value, modifier)
-+    }
-+  }
-+}
-diff --git a/repl/scala-2.12/src/test/scala/org/apache/livy/repl/SparkInterpreterSpec.scala b/repl/scala-2.12/src/test/scala/org/apache/livy/repl/SparkInterpreterSpec.scala
-new file mode 100644
-index 0000000..d922034
---- /dev/null
-+++ b/repl/scala-2.12/src/test/scala/org/apache/livy/repl/SparkInterpreterSpec.scala
-@@ -0,0 +1,68 @@
-+/*
-+ * Licensed to the Apache Software Foundation (ASF) under one or more
-+ * contributor license agreements.  See the NOTICE file distributed with
-+ * this work for additional information regarding copyright ownership.
-+ * The ASF licenses this file to You under the Apache License, Version 2.0
-+ * (the "License"); you may not use this file except in compliance with
-+ * the License.  You may obtain a copy of the License at
-+ *
-+ *    http://www.apache.org/licenses/LICENSE-2.0
-+ *
-+ * Unless required by applicable law or agreed to in writing, software
-+ * distributed under the License is distributed on an "AS IS" BASIS,
-+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-+ * See the License for the specific language governing permissions and
-+ * limitations under the License.
-+ */
-+
-+package org.apache.livy.repl
-+
-+import org.scalatest._
-+
-+import org.apache.livy.LivyBaseUnitTestSuite
-+
-+class SparkInterpreterSpec extends FunSpec with Matchers with LivyBaseUnitTestSuite {
-+  describe("SparkInterpreter") {
-+    val interpreter = new SparkInterpreter(null)
-+
-+    it("should parse Scala compile error.") {
-+      // Regression test for LIVY-.
-+      val error =
-+        """<console>:27: error: type mismatch;
-+          | found   : Int
-+          | required: String
-+          |       sc.setJobGroup(groupName, groupName, true)
-+          |                      ^
-+          |<console>:27: error: type mismatch;
-+          | found   : Int
-+          | required: String
-+          |       sc.setJobGroup(groupName, groupName, true)
-+          |                                 ^
-+          |""".stripMargin
-+
-+      val parsedError = AbstractSparkInterpreter.KEEP_NEWLINE_REGEX.split(error)
-+
-+      val expectedTraceback = parsedError.tail
-+
-+      val (ename, traceback) = interpreter.parseError(error)
-+      ename shouldBe "<console>:27: error: type mismatch;"
-+      traceback shouldBe expectedTraceback
-+    }
-+
-+    it("should parse Scala runtime error.") {
-+      val error =
-+        """java.lang.RuntimeException: message
-+          |    ... 48 elided
-+          |
-+          |Tailing message""".stripMargin
-+
-+      val parsedError = AbstractSparkInterpreter.KEEP_NEWLINE_REGEX.split(error)
-+
-+      val expectedTraceback = parsedError.tail
-+
-+      val (ename, traceback) = interpreter.parseError(error)
-+      ename shouldBe "java.lang.RuntimeException: message"
-+      traceback shouldBe expectedTraceback
-+    }
-+  }
-+}
-diff --git a/scala-api/scala-2.12/pom.xml b/scala-api/scala-2.12/pom.xml
-new file mode 100644
-index 0000000..e70f817
---- /dev/null
-+++ b/scala-api/scala-2.12/pom.xml
-@@ -0,0 +1,38 @@
-+<?xml version="1.0" encoding="UTF-8"?>
-+<!--
-+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-+  ~ contributor license agreements.  See the NOTICE file distributed with
-+  ~ this work for additional information regarding copyright ownership.
-+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-+  ~ (the "License"); you may not use this file except in compliance with
-+  ~ the License.  You may obtain a copy of the License at
-+  ~
-+  ~    http://www.apache.org/licenses/LICENSE-2.0
-+  ~
-+  ~ Unless required by applicable law or agreed to in writing, software
-+  ~ distributed under the License is distributed on an "AS IS" BASIS,
-+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-+  ~ See the License for the specific language governing permissions and
-+  ~ limitations under the License.
-+-->
-+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-+  <modelVersion>4.0.0</modelVersion>
-+  <groupId>org.apache.livy</groupId>
-+  <artifactId>livy-scala-api_2.12</artifactId>
-+  <version>0.7.1-incubating</version>
-+  <packaging>jar</packaging>
-+
-+  <parent>
-+    <groupId>org.apache.livy</groupId>
-+    <artifactId>livy-scala-api-parent</artifactId>
-+    <version>0.7.1-incubating</version>
-+    <relativePath>../pom.xml</relativePath>
-+  </parent>
-+
-+  <properties>
-+    <scala.version>${scala-2.12.version}</scala.version>
-+    <scala.binary.version>2.12</scala.binary.version>
-+    <spark.version>${spark.scala-2.12.version}</spark.version>
-+    <netty.version>${netty.spark-2.12.version}</netty.version>
-+  </properties>
-+</project>
diff --git a/bigtop-packages/src/rpm/livy/SPECS/livy.spec b/bigtop-packages/src/rpm/livy/SPECS/livy.spec
index decd9c8..6be0afd 100644
--- a/bigtop-packages/src/rpm/livy/SPECS/livy.spec
+++ b/bigtop-packages/src/rpm/livy/SPECS/livy.spec
@@ -29,7 +29,7 @@ Summary: Livy Server
 URL: http://livy.incubator.apache.org/
 Group: Development/Libraries
 License: ASL 2.0
-Source0: %{name}-%{livy_base_version}.tar.gz
+Source0: %{name}-%{livy_base_version}.zip
 Source1: do-component-build
 Source2: install_%{name}.sh
 Source3: livy-server.svc
diff --git a/bigtop.bom b/bigtop.bom
index b5d2d43..5257a52 100644
--- a/bigtop.bom
+++ b/bigtop.bom
@@ -350,7 +350,7 @@ bigtop {
       name    = 'livy'
       relNotes = 'Apache Livy'
       version { base = '0.7.1'; pkg = base; release = 1 }
-      tarball { destination = "${name}-${version.base}.tar.gz"
+      tarball { destination = "${name}-${version.base}.zip"
                 source      = "apache-livy-${version.base}-incubating-src.zip" }
       url     { download_path = "incubator/livy/${version.base}-incubating/"
                 site = "${apache.APACHE_MIRROR}/${download_path}"