You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by pd...@apache.org on 2021/06/25 07:21:56 UTC

[zeppelin] branch master updated: [ZEPPELIN-5420] Remove deprecated R module

This is an automated email from the ASF dual-hosted git repository.

pdallig pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/master by this push:
     new 1d77c42  [ZEPPELIN-5420] Remove deprecated R module
1d77c42 is described below

commit 1d77c42cc3f137e041e5245630909e841618deb3
Author: Philipp Dallig <ph...@gmail.com>
AuthorDate: Wed May 19 08:11:27 2021 +0200

    [ZEPPELIN-5420] Remove deprecated R module
    
    ### What is this PR for?
    This PR removes the deprecated R module.
    
    ### What type of PR is it?
    -  Refactoring
    
    ### What is the Jira issue?
    * https://issues.apache.org/jira/browse/ZEPPELIN-5420
    
    ### Questions:
    * Do the license files need an update? Yes
    * Are there breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Philipp Dallig <ph...@gmail.com>
    
    Closes #4145 from Reamer/remove_r_module and squashes the following commits:
    
    70153aa2f [Philipp Dallig] Remove deprecated R module
---
 LICENSE                                            |   7 -
 pom.xml                                            |  15 -
 r/R/install-dev.sh                                 |  41 --
 r/R/rzeppelin/DESCRIPTION                          |  28 --
 r/R/rzeppelin/LICENSE                              |  14 -
 r/R/rzeppelin/NAMESPACE                            |   7 -
 r/R/rzeppelin/R/common.R                           |  14 -
 r/R/rzeppelin/R/globals.R                          |   3 -
 r/R/rzeppelin/R/protocol.R                         |  35 --
 r/R/rzeppelin/R/rServer.R                          | 214 ---------
 r/R/rzeppelin/R/rzeppelin.R                        |  95 ----
 r/R/rzeppelin/R/scalaInterpreter.R                 | 123 -----
 r/R/rzeppelin/R/zzz.R                              |   9 -
 r/_tools/checkstyle.xml                            | 282 -----------
 r/_tools/scalastyle.xml                            | 146 ------
 r/pom.xml                                          | 351 --------------
 .../org/apache/zeppelin/rinterpreter/KnitR.java    | 133 ------
 .../org/apache/zeppelin/rinterpreter/RRepl.java    | 133 ------
 .../org/apache/zeppelin/rinterpreter/RStatics.java |  86 ----
 r/src/main/resources/interpreter-setting.json      |  61 ---
 .../org/apache/spark/api/r/RBackendHelper.scala    |  84 ----
 .../zeppelin/rinterpreter/KnitRInterpreter.scala   |  77 ---
 .../apache/zeppelin/rinterpreter/RContext.scala    | 320 -------------
 .../zeppelin/rinterpreter/RInterpreter.scala       | 168 -------
 .../zeppelin/rinterpreter/RReplInterpreter.scala   |  98 ----
 .../org/apache/zeppelin/rinterpreter/package.scala |  29 --
 .../zeppelin/rinterpreter/rscala/Package.scala     |  39 --
 .../zeppelin/rinterpreter/rscala/RClient.scala     | 527 ---------------------
 .../zeppelin/rinterpreter/rscala/RException.scala  |  31 --
 r/src/main/scala/scala/Console.scala               | 491 -------------------
 .../apache/spark/api/r/RBackendHelperTest.scala    |  49 --
 .../zeppelin/rinterpreter/RContextInitTest.scala   | 113 -----
 .../zeppelin/rinterpreter/RContextTest.scala       | 115 -----
 .../zeppelin/rinterpreter/RInterpreterTest.scala   | 141 ------
 .../apache/zeppelin/rinterpreter/WrapperTest.scala | 102 ----
 .../org/apache/zeppelin/rinterpreter/package.scala |  23 -
 36 files changed, 4204 deletions(-)

diff --git a/LICENSE b/LICENSE
index 5bd3e1a..7f75913 100644
--- a/LICENSE
+++ b/LICENSE
@@ -274,13 +274,6 @@ BSD 3-Clause licenses
 ========================================================================
 The following components are provided under the BSD 3-Clause license.  See file headers and project links for details.
 
-  (BSD 3 Clause) portions of rscala 1.0.6 (https://dahl.byu.edu/software/rscala/) - https://cran.r-project.org/web/packages/rscala/index.html
-   r/R/rzeppelin/R/{common.R, globals.R,protocol.R,rServer.R,scalaInterpreter.R,zzz.R }
-   r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/{Package.scala, RClient.scala}
-
-  (BSD 3 Clause) portions of Scala (http://www.scala-lang.org/download) - http://www.scala-lang.org/download/#License
-   r/src/main/scala/scala/Console.scala
-
   (BSD 3 Clause) diff.js (https://github.com/kpdecker/jsdiff)
 
   (BSD 3-Clause) Google Auth Library for Java (https://github.com/google/google-auth-library-java)
diff --git a/pom.xml b/pom.xml
index 19bd52b..5452261 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1852,21 +1852,6 @@
               <!-- package.json -->
               <exclude>**/package.json</exclude>
 
-              <!-- compiled R packages (binaries) -->
-              <exclude>**/R/lib/**</exclude>
-              <exclude>**/lib/rzeppelin/**</exclude>
-
-              <!--R-related files with alternative licenses-->
-
-              <exclude>**/R/rzeppelin/R/*.R</exclude>
-              <exclude>**/src/main/scala/scala/Console.scala</exclude>
-              <exclude>**/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/Package.scala</exclude>
-              <exclude>**/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/RClient.scala</exclude>
-
-               <!--The following files are mechanical-->
-              <exclude>**/R/rzeppelin/DESCRIPTION</exclude>
-              <exclude>**/R/rzeppelin/NAMESPACE</exclude>
-              
               <exclude>zeppelin-jupyter-interpreter/src/main/resources/grpc/jupyter/*.py</exclude>
             </excludes>
           </configuration>
diff --git a/r/R/install-dev.sh b/r/R/install-dev.sh
deleted file mode 100755
index a3b5224..0000000
--- a/r/R/install-dev.sh
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This scripts packages R files to create a package that can be loaded into R,
-# and also installs necessary packages.
-
-
-set -o pipefail
-set -e
-set -x
-
-FWDIR="$(cd `dirname $0`; pwd)"
-LIB_DIR="$FWDIR/../../R/lib"
-
-mkdir -p $LIB_DIR
-
-pushd $FWDIR > /dev/null
-
-# Generate Rd files if devtools is installed
-#Rscript -e ' if("devtools" %in% rownames(installed.packages())) { library(devtools); devtools::document(pkg="./pkg", roclets=c("rd")) }'
-
-# Install SparkR to $LIB_DIR
-R CMD INSTALL --library=$LIB_DIR $FWDIR/rzeppelin/
-
-popd > /dev/null
-set +x
\ No newline at end of file
diff --git a/r/R/rzeppelin/DESCRIPTION b/r/R/rzeppelin/DESCRIPTION
deleted file mode 100644
index 0d7d9ba..0000000
--- a/r/R/rzeppelin/DESCRIPTION
+++ /dev/null
@@ -1,28 +0,0 @@
-Package: rzeppelin
-Type: Package
-Title: Interface from scala to R, based on rscala, for the Apache Zeppelin project
-Version: 0.1.0
-Date: 2015-12-01
-Authors@R: c(person(given="David B.",family="Dahl",role=c("aut","cre"),email="dahl@stat.byu.edu"),
-             person(family="Scala developers",role="ctb",comment="see http://scala-lang.org/"))
-URL: http://dahl.byu.edu/software/rscala/
-Imports: utils,
-	evaluate
-Suggests:
-	googleVis,
-	htmltools,
-	knitr,
-	rCharts,
-	repr,
-	SparkR,
-	base64enc
-SystemRequirements: Scala (>= 2.10)
-Description:
-License: file LICENSE
-NeedsCompilation: no
-Packaged: 2015-05-15 13:36:01 UTC; dahl
-Author: David B. Dahl [aut, cre],
-  Scala developers [ctb] (see http://scala-lang.org/)
-Maintainer: Amos B. Elberg <am...@gmail.com>
-Repository:
-Date/Publication: 2015-12-01 21:50:02
diff --git a/r/R/rzeppelin/LICENSE b/r/R/rzeppelin/LICENSE
deleted file mode 100644
index 0ed96c4..0000000
--- a/r/R/rzeppelin/LICENSE
+++ /dev/null
@@ -1,14 +0,0 @@
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
\ No newline at end of file
diff --git a/r/R/rzeppelin/NAMESPACE b/r/R/rzeppelin/NAMESPACE
deleted file mode 100644
index 8afdfe6..0000000
--- a/r/R/rzeppelin/NAMESPACE
+++ /dev/null
@@ -1,7 +0,0 @@
-import(utils)
-
-export("rzeppelinPackage")
-export("progress_zeppelin")
-export(.z.put)
-export(.z.get)
-export(.z.input)
\ No newline at end of file
diff --git a/r/R/rzeppelin/R/common.R b/r/R/rzeppelin/R/common.R
deleted file mode 100644
index a52e22e..0000000
--- a/r/R/rzeppelin/R/common.R
+++ /dev/null
@@ -1,14 +0,0 @@
-strintrplt <- function(snippet,envir=parent.frame()) {
-  if ( ! is.character(snippet) ) stop("Character vector expected.")
-  if ( length(snippet) != 1 ) stop("Length of vector must be exactly one.")
-  m <- regexpr("@\\{([^\\}]+)\\}",snippet)
-  if ( m != -1 ) {
-    s1 <- substr(snippet,1,m-1)
-    s2 <- substr(snippet,m+2,m+attr(m,"match.length")-2)
-    s3 <- substr(snippet,m+attr(m,"match.length"),nchar(snippet))
-    strintrplt(paste(s1,paste(toString(eval(parse(text=s2),envir=envir)),collapse=" ",sep=""),s3,sep=""),envir)
-  } else snippet
-}
-
-
-
diff --git a/r/R/rzeppelin/R/globals.R b/r/R/rzeppelin/R/globals.R
deleted file mode 100644
index 17b59aa..0000000
--- a/r/R/rzeppelin/R/globals.R
+++ /dev/null
@@ -1,3 +0,0 @@
-lEtTeRs <- c(letters,LETTERS)
-alphabet <- c(lEtTeRs,0:9)
-
diff --git a/r/R/rzeppelin/R/protocol.R b/r/R/rzeppelin/R/protocol.R
deleted file mode 100644
index 0fe07e2..0000000
--- a/r/R/rzeppelin/R/protocol.R
+++ /dev/null
@@ -1,35 +0,0 @@
-UNSUPPORTED_TYPE <- 0L
-INTEGER <- 1L
-DOUBLE <-  2L
-BOOLEAN <- 3L
-STRING <-  4L
-DATE <- 5L
-DATETIME <- 6L
-UNSUPPORTED_STRUCTURE <- 10L
-NULLTYPE  <- 11L
-REFERENCE <- 12L
-ATOMIC    <- 13L
-VECTOR    <- 14L
-MATRIX    <- 15L
-LIST <- 16L
-DATAFRAME <- 17L
-S3CLASS <- 18L
-S4CLASS <- 19L
-JOBJ <- 20L
-EXIT          <- 100L
-RESET         <- 101L
-GC            <- 102L
-DEBUG         <- 103L
-EVAL          <- 104L
-SET           <- 105L
-SET_SINGLE    <- 106L
-SET_DOUBLE    <- 107L
-GET           <- 108L
-GET_REFERENCE <- 109L
-DEF           <- 110L
-INVOKE        <- 111L
-SCALAP        <- 112L
-OK <- 1000L
-ERROR <- 1001L
-UNDEFINED_IDENTIFIER <- 1002L
-CURRENT_SUPPORTED_SCALA_VERSION <- "2.10"
diff --git a/r/R/rzeppelin/R/rServer.R b/r/R/rzeppelin/R/rServer.R
deleted file mode 100644
index af74d7d..0000000
--- a/r/R/rzeppelin/R/rServer.R
+++ /dev/null
@@ -1,214 +0,0 @@
-rServe <- function(sockets) {
-  cc(sockets)
-  workspace <- sockets[['workspace']]
-  debug <- get("debug",envir=sockets[['env']])
-  while ( TRUE ) {
-    if ( debug ) cat("R DEBUG: Top of the loop waiting for a command.\n")
-    cmd <- rb(sockets,integer(0))
-    if ( cmd == EXIT ) {
-      if ( debug ) cat("R DEBUG: Got EXIT\n")
-      return()
-    } else if ( cmd == DEBUG ) {
-      if ( debug ) cat("R DEBUG: Got DEBUG\n")
-      newDebug <- ( rb(sockets,integer(0)) != 0 )
-      if ( debug != newDebug ) cat("R DEBUG: Debugging is now ",newDebug,"\n",sep="")
-      debug <- newDebug
-      assign("debug",debug,envir=sockets[['env']])
-    } else if ( cmd == EVAL ) {
-      if ( debug ) cat("R DEBUG: Got EVAL\n")
-      snippet <- rc(sockets)
-      output <- capture.output(result <- try(eval(parse(text=snippet),envir=workspace)))
-      if ( inherits(result,"try-error") ) {
-        wb(sockets,ERROR)
-        msg <- paste(c(output,attr(result,"condition")$message),collapse="\n")
-        wc(sockets,msg)
-      } else {
-        wb(sockets,OK)
-        output <- paste(output,collapse="\n")
-        wc(sockets,output)
-      }
-      assign(".rzeppelin.last.value",result,envir=workspace)
-    } else if ( cmd %in% c(SET,SET_SINGLE,SET_DOUBLE) ) {
-      if ( debug ) cat("R DEBUG: Got SET\n")
-      if ( cmd != SET ) index <- rc(sockets)
-      identifier <- rc(sockets)
-      dataStructure <- rb(sockets,integer(0))
-      if ( dataStructure == NULLTYPE ) {
-        if ( cmd == SET ) assign(identifier,NULL,envir=workspace)
-        else subassign(sockets,identifier,index,NULL,cmd==SET_SINGLE)
-      } else if ( dataStructure == ATOMIC ) {
-        dataType <- rb(sockets,integer(0))
-        if ( dataType == INTEGER ) value <- rb(sockets,integer(0))
-        else if ( dataType == DOUBLE ) value <- rb(sockets,double(0))
-        else if ( dataType == BOOLEAN ) value <- rb(sockets,integer(0)) != 0
-        else if ( dataType == STRING ) value <- rc(sockets)
-#        else if (dataType == DATE) value <- as.Date(rb(sockets,integer(0)), origin=as.Date("1970-01-01"))
-        else stop(paste("Unknown data type:",dataType))
-        if ( cmd == SET ) assign(identifier,value,envir=workspace)
-        else subassign(sockets,identifier,index,value,cmd==SET_SINGLE)
-      } else if ( dataStructure == VECTOR ) {
-        dataLength <- rb(sockets,integer(0))
-        dataType <- rb(sockets,integer(0))
-        if ( dataType == INTEGER ) value <- rb(sockets,integer(0),n=dataLength)
-        else if ( dataType == DOUBLE ) value <- rb(sockets,double(0),n=dataLength)
-        else if ( dataType == BOOLEAN ) value <- rb(sockets,integer(0),n=dataLength) != 0
-        else if ( dataType == STRING ) value <- sapply(1:dataLength,function(i) rc(sockets))
-#        else if ( dateType == DATE ) value <- as.Date(rb(sockets,integer(0), n = dataLength), origin=as.Date("1970-01-01"))
-        else stop(paste("Unknown data type:",dataType))
-        if ( cmd == SET ) assign(identifier,value,envir=workspace)
-        else subassign(sockets,identifier,index,value,cmd==SET_SINGLE)
-      } else if ( dataStructure == MATRIX ) {
-        dataNRow <- rb(sockets,integer(0))
-        dataNCol <- rb(sockets,integer(0))
-        dataLength <- dataNRow * dataNCol
-        dataType <- rb(sockets,integer(0))
-        if ( dataType == INTEGER ) value <- matrix(rb(sockets,integer(0),n=dataLength),nrow=dataNRow,byrow=TRUE)
-        else if ( dataType == DOUBLE ) value <- matrix(rb(sockets,double(0),n=dataLength),nrow=dataNRow,byrow=TRUE)
-        else if ( dataType == BOOLEAN ) value <- matrix(rb(sockets,integer(0),n=dataLength),nrow=dataNRow,byrow=TRUE) != 0
-        else if ( dataType == STRING ) value <- matrix(sapply(1:dataLength,function(i) rc(sockets)),nrow=dataNRow,byrow=TRUE)
-#        else if ( dateType == DATE) value <- matrix(as.Date(rb(sockets,integer(0),n=dataLength),
- #                 origin = as.Date("1970-01-01")),nrow=dataNRow,byrow=TRUE)
-        else stop(paste("Unknown data type:",dataType))
-        if ( cmd == SET ) assign(identifier,value,envir=workspace)
-        else subassign(sockets,identifier,index,value,cmd==SET_SINGLE)
-      } else if ( dataStructure == REFERENCE ) {
-        otherIdentifier <- rc(sockets)
-        if ( exists(otherIdentifier,envir=workspace$.) ) {
-          wb(sockets,OK)
-          value <- get(otherIdentifier,envir=workspace$.)
-          if ( cmd == SET ) assign(identifier,value,envir=workspace)
-          else subassign(sockets,identifier,index,value,cmd==SET_SINGLE)
-        } else {
-          wb(sockets,UNDEFINED_IDENTIFIER)
-        }
-      } else stop(paste("Unknown data structure:",dataStructure))
-    } else if ( cmd == GET ) {
-      if ( debug ) cat("R DEBUG: Got GET\n")
-      identifier <- rc(sockets)
-      value <- tryCatch(get(identifier,envir=workspace),error=function(e) e)
-      if ( is.null(value) ) {
-        wb(sockets,NULLTYPE)
-      } else if ( inherits(value,"error") ) {
-        wb(sockets,UNDEFINED_IDENTIFIER)
-      } else if ( ! is.atomic(value) ) {
-      # This is where code for lists, data.frames, S3, and S4 classes must go
-        wb(sockets,UNSUPPORTED_STRUCTURE)
-      } else if ( is.vector(value) ) {
-        type <- checkType(value)
-        if ( ( length(value) == 1 ) && ( ! get("length.one.as.vector",envir=sockets[['env']]) ) ) {
-          wb(sockets,ATOMIC)
-        } else {
-          wb(sockets,VECTOR)
-          wb(sockets,length(value))
-        }
-        wb(sockets,type)
-        if ( type == STRING ) {
-          if ( length(value) > 0 ) for ( i in 1:length(value) ) wc(sockets,value[i])
-        } else {
-          if ( type == BOOLEAN ) wb(sockets,as.integer(value))
-#          else if (type == DATE) wb(sockets,as.integer(value))
-          else wb(sockets,value)
-        }
-      } else if ( is.matrix(value) ) {
-        type <- checkType(value)
-        wb(sockets,MATRIX)
-        wb(sockets,dim(value))
-        wb(sockets,type)
-        if ( nrow(value) > 0 ) for ( i in 1:nrow(value) ) {
-          if ( type == STRING ) {
-            if ( ncol(value) > 0 ) for ( j in 1:ncol(value) ) wc(sockets,value[i,j])
-          }
-          else if ( type == BOOLEAN ) wb(sockets,as.integer(value[i,]))
-#          else if (type == DATE) wb(sockets, as.integer(value[i,]))
-          else wb(sockets,value[i,])
-        }
-      } else {
-        wb(sockets,UNSUPPORTED_STRUCTURE)
-      }
-    } else if ( cmd == GET_REFERENCE ) {
-      if ( debug ) cat("R DEBUG: Got GET_REFERENCE\n")
-      identifier <- rc(sockets)
-      value <- tryCatch(get(identifier,envir=workspace),error=function(e) e)
-      if ( inherits(value,"error") ) {
-        wb(sockets,UNDEFINED_IDENTIFIER)
-      } else {
-        wb(sockets,REFERENCE)
-        wc(sockets,new.reference(value,workspace$.))
-      }
-    } else if ( cmd == GC ) {
-      if ( debug ) cat("R DEBUG: Got GC\n")
-      workspace$. <- new.env(parent=workspace)
-    } else stop(paste("Unknown command:",cmd))
-    flush(sockets[['socketIn']])
-  }
-}
-
-subassign <- function(sockets,x,i,value,single=TRUE) {
-  workspace <- sockets[['workspace']]
-  assign(".rzeppelin.set.value",value,envir=workspace)
-  brackets <- if ( single ) c("[","]") else c("[[","]]")
-  output <- capture.output(result <- try(eval(parse(text=paste0(x,brackets[1],i,brackets[2]," <- .rzeppelin.set.value")),envir=workspace)))
-  if ( inherits(result,"try-error") ) {
-    wb(sockets,ERROR)
-    output <- paste(paste(output,collapse="\n"),paste(attr(result,"condition")$message,collapse="\n"),sep="\n")
-    wc(sockets,output)
-  } else {
-    wb(sockets,OK)
-  }
-  rm(".reppelin.set.value",envir=workspace)
-  invisible(value)
-}
-
-new.reference <- function(value,envir) {
-  name <- ""
-  while ( ( name == "" ) || ( exists(name,envir=envir) ) ) {
-    name <- paste0(sample(lEtTeRs,1),paste0(sample(alphabet,7,replace=TRUE),collapse=""))
-  }
-  assign(name,value,envir=envir)
-  name
-}
-
-newSockets <- function (portsFilename, debug, timeout)
-{
-	getPortNumbers <- function() {
-		delay <- 0.1
-		start <- proc.time()[3]
-		while (TRUE) {
-			if ((proc.time()[3] - start) > timeout)
-				stop("Timed out waiting for Scala to start.")
-			Sys.sleep(delay)
-			delay <- 1 * delay
-			if (file.exists(portsFilename)) {
-				line <- scan(portsFilename, n = 2, what = character(0),
-										 quiet = TRUE)
-				if (length(line) > 0)
-					return(as.numeric(line))
-			}
-		}
-	}
-	ports <- getPortNumbers()
-	file.remove(portsFilename)
-	if (debug)
-		cat("R DEBUG: Trying to connect to port:", paste(ports,
-																										 collapse = ","), "\n")
-	socketConnectionIn <- socketConnection(port = ports[1], blocking = TRUE,
-																				 open = "ab", timeout = 2678400)
-	socketConnectionOut <- socketConnection(port = ports[2],
-																					blocking = TRUE, open = "rb", timeout = 2678400)
-	functionCache <- new.env()
-	env <- new.env()
-	assign("open", TRUE, envir = env)
-	assign("debug", debug, envir = env)
-	assign("length.one.as.vector", FALSE, envir = env)
-	workspace <- new.env()
-	workspace$. <- new.env(parent = workspace)
-	result <- list(socketIn = socketConnectionIn, socketOut = socketConnectionOut,
-								 env = env, workspace = workspace, functionCache = functionCache)
-	class(result) <- "ScalaInterpreter"
-	status <- rb(result, integer(0))
-	if ((length(status) == 0) || (status != OK))
-		stop("Error instantiating interpreter.")
-	wc(result, toString(packageVersion("rzeppelin")))
-	flush(result[["socketIn"]])
-	result
-}
diff --git a/r/R/rzeppelin/R/rzeppelin.R b/r/R/rzeppelin/R/rzeppelin.R
deleted file mode 100644
index c033efb..0000000
--- a/r/R/rzeppelin/R/rzeppelin.R
+++ /dev/null
@@ -1,95 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-.zeppenv <- new.env()
-
-.z.ohandler = evaluate:::new_output_handler(
-	value = function(x) {
-		if (is.data.frame(x)) return(x)
-		if ("html" %in% class(x)) return(x)
-		if (require("htmltools") & require("knitr")) {
-			if ("htmlwidget" %in% class(x)) {
-				return(.z.show.htmlwidget(x))
-			}
-		}
-		if (isS4(x)) show(x)
-		else {
-			if (require("repr")) {
-				return(repr:::repr(x))
-			} else return(x)
-		}
-	}
-)
-
-# wrapper for evaluate
-.z.valuate <- function(input) evaluate:::evaluate(
-	input = input,
-	envir =.zeppenv,
-	debug = FALSE,
-	output_handler =.z.ohandler,
-	stop_on_error = 0
-)
-
-# converts data.tables to the format needed for display in zeppelin
-
-.z.table <- function(i) {
-
-	.zdfoutcon <- textConnection(".zdfout", open="w")
-	write.table(i,
-							col.names=TRUE, row.names=FALSE, sep="\t",
-							eol="\n", quote = FALSE, file = .zdfoutcon)
-	close(.zdfoutcon)
-	rm(.zdfoutcon)
-	.zdfout
-}
-
-.z.completion <- function(buf, cursor) {
-	utils:::.assignLinebuffer(buf)
-	utils:::.assignEnd(cursor)
-	utils:::.guessTokenFromLine()
-	utils:::.completeToken()
-	utils:::.retrieveCompletions()
-}
-
-.z.setProgress <- function(progress)  SparkR:::callJMethod(.rContext, "setProgress", progress %% 100)
-.z.incrementProgress <- function(increment = 1) SparkR:::callJMethod(.rContext, "incrementProgress", increment)
-
-.z.input <- function(name) SparkR:::callJMethod(.zeppelinContext, "input", name)
-
-.z.get <- function(name) {
-  isRDD <- SparkR:::callJStatic("org.apache.zeppelin.rinterpreter.RStatics", "testRDD", name)
-  obj <- SparkR:::callJStatic("org.apache.zeppelin.rinterpreter.RStatics", "getZ", name)
-  if (isRDD) SparkR:::RDD(obj)
-  else obj
- }
-
-.z.put <- function(name, object) {
-  if ("RDD" %in% class(object)) object <- SparkR:::getJRDD(object)
-  SparkR:::callJStatic("org.apache.zeppelin.rinterpreter.RStatics", "putZ", name, object)
- }
-
-.z.repr <- function(x) {
-    if (require(repr)) repr:::repr(x)
-    else toString(x)
- }
-
-progress_zeppelin <- function(...) {
-  list(init = function(x) .z.setProgress(0),
-    step = function() .z.incrementProgress,
-    term = function() {})
- }
-
diff --git a/r/R/rzeppelin/R/scalaInterpreter.R b/r/R/rzeppelin/R/scalaInterpreter.R
deleted file mode 100644
index c7b236f..0000000
--- a/r/R/rzeppelin/R/scalaInterpreter.R
+++ /dev/null
@@ -1,123 +0,0 @@
-rzeppelinPackage <- function(pkgname) {
-  environmentOfDependingPackage <- parent.env(parent.frame())
-  E <- new.env(parent=environmentOfDependingPackage)
-  E$initialized <- FALSE
-  E$pkgname <- pkgname
-  assign("E",E,envir=environmentOfDependingPackage)
-  invisible()
-}
-
-
-
-# Private
-
-checkType <- function(x) {
-  if ( is.integer(x) ) INTEGER
-  else if ( is.double(x) ) DOUBLE
-  else if ( is.logical(x) ) BOOLEAN
-  else if ( is.character(x) ) STRING
-  else if ( is.date(x)) DATE
-  else stop("Unsupported data type.")
-}
-
-checkType2 <- function(x) {
-  if ( is.integer(x) ) "Int"
-  else if ( is.double(x) ) "Double"
-  else if ( is.logical(x) ) "Boolean"
-  else if ( is.character(x) ) "String"
-  else if ( is.date(x) ) "Date"
-  else stop("Unsupported data type.")
-}
-
-convert <- function(x,t) {
-  if ( t == "Int" ) {
-    tt <- "atomic"
-    tm <- "integer"
-    loav <- FALSE
-  } else if ( t == "Double" ) {
-    tt <- "atomic"
-    tm <- "double"
-    loav <- FALSE
-  } else if ( t == "Boolean" ) {
-    tt <- "atomic"
-    tm <- "logical"
-    loav <- FALSE
-  } else if ( t == "String" ) {
-    tt <- "atomic"
-    tm <- "character"
-    loav <- FALSE
-  } else if ( t == "Array[Int]" ) {
-    tt <- "vector"
-    tm <- "integer"
-    loav <- TRUE
-  } else if ( t == "Array[Double]" ) {
-    tt <- "vector"
-    tm <- "double"
-    loav <- TRUE
-  } else if ( t == "Array[Boolean]" ) {
-    tt <- "vector"
-    tm <- "logical"
-    loav <- TRUE
-  } else if ( t == "Array[String]" ) {
-    tt <- "vector"
-    tm <- "character"
-    loav <- TRUE
-  } else if ( t == "Array[Array[Int]]" ) {
-    tt <- "matrix"
-    tm <- "integer"
-    loav <- TRUE
-  } else if ( t == "Array[Array[Double]]" ) {
-    tt <- "matrix"
-    tm <- "double"
-    loav <- TRUE
-  } else if ( t == "Array[Array[Boolean]]" ) {
-    tt <- "matrix"
-    tm <- "logical"
-    loav <- TRUE
-  } else if ( t == "Array[Array[String]]" ) {
-    tt <- "matrix"
-    tm <- "character"
-    loav <- TRUE
-  } else {
-    tt <- "reference"
-    tm <- "reference"
-    loav <- FALSE
-  }
-  v <- character(0)
-  if ( tt == "atomic" ) v <- c(v,sprintf("%s <- as.vector(%s)[1]",x,x))
-  else if ( tt == "vector" ) v <- c(v,sprintf("%s <- as.vector(%s)",x,x))
-  else if ( tt == "matrix" ) v <- c(v,sprintf("%s <- as.matrix(%s)",x,x))
-  if ( tm != "reference" ) v <- c(v,sprintf("storage.mode(%s) <- '%s'",x,tm))
-  if ( length(v) != 0 ) {
-    v <- c(sprintf("if ( ! inherits(%s,'ScalaInterpreterReference') ) {",x),paste("  ",v,sep=""),"}")
-  }
-  c(v,sprintf("intpSet(interpreter,'.',%s,length.one.as.vector=%s,quiet=TRUE)",x,loav))
-}
-
-cc <- function(c) {
-  if ( ! get("open",envir=c[['env']]) ) stop("The connection has already been closed.")
-}
-
-wb <- function(c,v) writeBin(v,c[['socketIn']],endian="big")
-
-wc <- function(c,v) {
-  bytes <- charToRaw(v)
-  wb(c,length(bytes))
-  writeBin(bytes,c[['socketIn']],endian="big",useBytes=TRUE)
-}
-
-# Sockets should be blocking, but that contract is not fulfilled when other code uses functions from the parallel library.  Program around their problem.
-rb <- function(c,v,n=1L) {
-  r <- readBin(c[['socketOut']],what=v,n=n,endian="big")
-  if ( length(r) == n ) r
-  else c(r,rb(c,v,n-length(r)))
-}
-
-# Sockets should be blocking, but that contract is not fulfilled when other code uses functions from the parallel library.  Program around their problem.
-rc <- function(c) {
-  length <- rb(c,integer(0))
-  r <- as.raw(c())
-  while ( length(r) != length ) r <- c(r,readBin(c[['socketOut']],what="raw",n=length,endian="big"))
-  rawToChar(r)
-}
-
diff --git a/r/R/rzeppelin/R/zzz.R b/r/R/rzeppelin/R/zzz.R
deleted file mode 100644
index d901b99..0000000
--- a/r/R/rzeppelin/R/zzz.R
+++ /dev/null
@@ -1,9 +0,0 @@
-typeMap <- list()
-typeMap[[INTEGER]] <- integer(0)
-typeMap[[DOUBLE]] <- double(0)
-typeMap[[BOOLEAN]] <- integer(0)
-typeMap[[STRING]] <- character(0)
-
-.onAttach <- function(libname, pkgname) {
-
-}
diff --git a/r/_tools/checkstyle.xml b/r/_tools/checkstyle.xml
deleted file mode 100644
index 618d74d..0000000
--- a/r/_tools/checkstyle.xml
+++ /dev/null
@@ -1,282 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-     http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!DOCTYPE module PUBLIC
-    "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
-    "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
-
-<!-- This is a checkstyle configuration file. For descriptions of what the 
-	following rules do, please see the checkstyle configuration page at http://checkstyle.sourceforge.net/config.html -->
-
-<module name="Checker">
-
-	<module name="FileTabCharacter">
-		<!-- Checks that there are no tab characters in the file. -->
-	</module>
-
-	<module name="NewlineAtEndOfFile">
-		<property name="lineSeparator" value="lf" />
-	</module>
-
-	<module name="RegexpSingleline">
-		<!-- Checks that FIXME is not used in comments. TODO is preferred. -->
-		<property name="format" value="((//.*)|(\*.*))FIXME" />
-		<property name="message"
-			value='TODO is preferred to FIXME.  e.g. "TODO(johndoe): Refactor when v2 is released."' />
-	</module>
-
-	<module name="RegexpSingleline">
-		<!-- Checks that TODOs are named. (Actually, just that they are followed 
-			by an open paren.) -->
-		<property name="format" value="((//.*)|(\*.*))TODO[^(]" />
-		<property name="message"
-			value='All TODOs should be named.  e.g. "TODO(johndoe): Refactor when v2 is released."' />
-	</module>
-
-	<!-- <module name="JavadocPackage"> - Checks that each Java package has 
-		a Javadoc file used for commenting. Only allows a package-info.java, not 
-		package.html. </module> -->
-	<!-- All Java AST specific tests live under TreeWalker module. -->
-	<module name="TreeWalker">
-
-		<!-- IMPORT CHECKS -->
-
-		<module name="RedundantImport">
-			<!-- Checks for redundant import statements. -->
-			<property name="severity" value="error" />
-		</module>
-		<!-- <module name="ImportOrder"> Checks for out of order import statements 
-			<property name="severity" value="warning"/> <property name="groups" value="com.google,android,junit,net,org,java,javax"/> 
-			This ensures that static imports go first <property name="option" value="top"/> 
-			<property name="tokens" value="STATIC_IMPORT, IMPORT"/> </module> -->
-		<!-- JAVADOC CHECKS -->
-
-		<!-- Checks for Javadoc comments. -->
-		<!-- See http://checkstyle.sf.net/config_javadoc.html -->
-		<module name="JavadocMethod">
-			<property name="scope" value="protected" />
-			<property name="severity" value="warning" />
-			<property name="allowMissingJavadoc" value="true" />
-			<property name="allowMissingParamTags" value="true" />
-			<property name="allowMissingReturnTag" value="true" />
-			<property name="allowMissingThrowsTags" value="true" />
-			<property name="allowThrowsTagsForSubclasses" value="true" />
-			<property name="allowUndeclaredRTE" value="true" />
-		</module>
-
-		<module name="JavadocType">
-			<property name="scope" value="protected" />
-			<property name="severity" value="error" />
-		</module>
-
-		<module name="JavadocStyle">
-			<property name="severity" value="warning" />
-		</module>
-
-		<!-- NAMING CHECKS -->
-
-		<!-- Item 38 - Adhere to generally accepted naming conventions -->
-
-		<module name="PackageName">
-			<!-- Validates identifiers for package names against the supplied expression. -->
-			<!-- Here the default checkstyle rule restricts package name parts to 
-				seven characters, this is not in line with common practice at Google. -->
-			<property name="format" value="^[a-z]+(\.[a-z][a-z0-9]{1,})*$" />
-			<property name="severity" value="warning" />
-		</module>
-
-		<module name="TypeNameCheck">
-			<!-- Validates static, final fields against the expression "^[A-Z][a-zA-Z0-9]*$". -->
-			<metadata name="altname" value="TypeName" />
-			<property name="severity" value="warning" />
-		</module>
-
-		<module name="ConstantNameCheck">
-			<!-- Validates non-private, static, final fields against the supplied 
-				public/package final fields "^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$". -->
-			<metadata name="altname" value="ConstantName" />
-			<property name="applyToPublic" value="true" />
-			<property name="applyToProtected" value="true" />
-			<property name="applyToPackage" value="true" />
-			<property name="applyToPrivate" value="false" />
-			<property name="format" value="^([A-Z][A-Z0-9]*(_[A-Z0-9]+)*|FLAG_.*)$" />
-			<message key="name.invalidPattern"
-				value="Variable ''{0}'' should be in ALL_CAPS (if it is a constant) or be private (otherwise)." />
-			<property name="severity" value="warning" />
-		</module>
-
-		<module name="StaticVariableNameCheck">
-			<!-- Validates static, non-final fields against the supplied expression 
-				"^[a-z][a-zA-Z0-9]*_?$". -->
-			<metadata name="altname" value="StaticVariableName" />
-			<property name="applyToPublic" value="true" />
-			<property name="applyToProtected" value="true" />
-			<property name="applyToPackage" value="true" />
-			<property name="applyToPrivate" value="true" />
-			<property name="format" value="^[a-z][a-zA-Z0-9]*_?$" />
-			<property name="severity" value="warning" />
-		</module>
-
-		<module name="MemberNameCheck">
-			<!-- Validates non-static members against the supplied expression. -->
-			<metadata name="altname" value="MemberName" />
-			<property name="applyToPublic" value="true" />
-			<property name="applyToProtected" value="true" />
-			<property name="applyToPackage" value="true" />
-			<property name="applyToPrivate" value="true" />
-			<property name="format" value="^[a-z][a-zA-Z0-9]*$" />
-			<property name="severity" value="warning" />
-		</module>
-
-		<module name="MethodNameCheck">
-			<!-- Validates identifiers for method names. -->
-			<metadata name="altname" value="MethodName" />
-			<property name="format" value="^[a-z][a-zA-Z0-9]*(_[a-zA-Z0-9]+)*$" />
-			<property name="severity" value="warning" />
-		</module>
-
-		<module name="ParameterName">
-			<!-- Validates identifiers for method parameters against the expression 
-				"^[a-z][a-zA-Z0-9]*$". -->
-			<property name="severity" value="warning" />
-		</module>
-
-		<module name="LocalFinalVariableName">
-			<!-- Validates identifiers for local final variables against the expression 
-				"^[a-z][a-zA-Z0-9]*$". -->
-			<property name="severity" value="warning" />
-		</module>
-
-		<module name="LocalVariableName">
-			<!-- Validates identifiers for local variables against the expression 
-				"^[a-z][a-zA-Z0-9]*$". -->
-			<property name="severity" value="warning" />
-		</module>
-
-
-		<!-- LENGTH and CODING CHECKS -->
-
-		<module name="LineLength">
-			<!-- Checks if a line is too long. -->
-			<property name="max"
-				value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.max}"
-				default="100" />
-			<property name="severity" value="error" />
-
-			<!-- The default ignore pattern exempts the following elements: - import 
-				statements - long URLs inside comments -->
-
-			<property name="ignorePattern"
-				value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.ignorePattern}"
-				default="^(package .*;\s*)|(import .*;\s*)|( *\* *https?://.*)$" />
-		</module>
-
-		<module name="LeftCurly">
-			<!-- Checks for placement of the left curly brace ('{'). -->
-			<property name="severity" value="warning" />
-		</module>
-
-		<module name="RightCurly">
-			<!-- Checks right curlies on CATCH, ELSE, and TRY blocks are on the same 
-				line. e.g., the following example is fine: <pre> if { ... } else </pre> -->
-			<!-- This next example is not fine: <pre> if { ... } else </pre> -->
-			<property name="option" value="same" />
-			<property name="severity" value="warning" />
-		</module>
-
-		<!-- Checks for braces around if and else blocks -->
-		<module name="NeedBraces">
-			<property name="severity" value="warning" />
-			<property name="tokens"
-				value="LITERAL_IF, LITERAL_ELSE, LITERAL_FOR, LITERAL_WHILE, LITERAL_DO" />
-		</module>
-
-		<module name="UpperEll">
-			<!-- Checks that long constants are defined with an upper ell. -->
-			<property name="severity" value="error" />
-		</module>
-
-		<module name="FallThrough">
-			<!-- Warn about falling through to the next case statement. Similar to 
-				javac -Xlint:fallthrough, but the check is suppressed if a single-line comment 
-				on the last non-blank line preceding the fallen-into case contains 'fall 
-				through' (or some other variants which we don't publicized to promote consistency). -->
-			<property name="reliefPattern"
-				value="fall through|Fall through|fallthru|Fallthru|falls through|Falls through|fallthrough|Fallthrough|No break|NO break|no break|continue on" />
-			<property name="severity" value="error" />
-		</module>
-
-
-		<!-- MODIFIERS CHECKS -->
-
-		<module name="ModifierOrder">
-			<!-- Warn if modifier order is inconsistent with JLS3 8.1.1, 8.3.1, and 
-				8.4.3. The prescribed order is: public, protected, private, abstract, static, 
-				final, transient, volatile, synchronized, native, strictfp -->
-		</module>
-
-
-		<!-- WHITESPACE CHECKS -->
-
-		<module name="WhitespaceAround">
-			<!-- Checks that various tokens are surrounded by whitespace. This includes 
-				most binary operators and keywords followed by regular or curly braces. -->
-			<property name="tokens"
-				value="ASSIGN, BAND, BAND_ASSIGN, BOR,
-        BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR, BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN,
-        EQUAL, GE, GT, LAND, LE, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE,
-        LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF, LITERAL_RETURN,
-        LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS,
-        MINUS_ASSIGN, MOD, MOD_ASSIGN, NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION,
-        SL, SL_ASSIGN, SR_ASSIGN, STAR, STAR_ASSIGN" />
-			<property name="severity" value="error" />
-		</module>
-
-		<module name="WhitespaceAfter">
-			<!-- Checks that commas, semicolons and typecasts are followed by whitespace. -->
-			<property name="tokens" value="COMMA, SEMI, TYPECAST" />
-		</module>
-
-		<module name="NoWhitespaceAfter">
-			<!-- Checks that there is no whitespace after various unary operators. 
-				Linebreaks are allowed. -->
-			<property name="tokens"
-				value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS,
-        UNARY_PLUS" />
-			<property name="allowLineBreaks" value="true" />
-			<property name="severity" value="error" />
-		</module>
-
-		<module name="NoWhitespaceBefore">
-			<!-- Checks that there is no whitespace before various unary operators. 
-				Linebreaks are allowed. -->
-			<property name="tokens" value="SEMI, DOT, POST_DEC, POST_INC" />
-			<property name="allowLineBreaks" value="true" />
-			<property name="severity" value="error" />
-		</module>
-
-		<module name="ParenPad">
-			<!-- Checks that there is no whitespace before close parens or after open 
-				parens. -->
-			<property name="severity" value="warning" />
-		</module>
-
-                <module name="Indentation">
-                        <!-- Checks code indentation -->
-                        <property name="basicOffset" value="2" />
-                </module>
-	</module>
-</module>
diff --git a/r/_tools/scalastyle.xml b/r/_tools/scalastyle.xml
deleted file mode 100644
index f7bb0d4..0000000
--- a/r/_tools/scalastyle.xml
+++ /dev/null
@@ -1,146 +0,0 @@
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~    http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<!-- NOTE: This was taken and adapted from Apache Spark. -->
-
-<!-- If you wish to turn off checking for a section of code, you can put a comment in the source
- before and after the section, with the following syntax: -->
-<!-- // scalastyle:off -->
-<!-- ... -->
-<!-- // naughty stuff -->
-<!-- ... -->
-<!-- // scalastyle:on -->
-
-<scalastyle>
- <name>Scalastyle standard configuration</name>
- <check level="error" class="org.scalastyle.file.FileTabChecker" enabled="true"></check>
- <!-- <check level="error" class="org.scalastyle.file.FileLengthChecker" enabled="true"> -->
- <!--  <parameters> -->
- <!--   <parameter name="maxFileLength"><![CDATA[800]]></parameter> -->
- <!--  </parameters> -->
- <!-- </check> -->
- <check level="error" class="org.scalastyle.file.HeaderMatchesChecker" enabled="true">
-  <parameters>
-      <parameter name="header"><![CDATA[/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */]]></parameter>
-  </parameters>
- </check>
- <check level="error" class="org.scalastyle.scalariform.SpacesAfterPlusChecker" enabled="true"></check>
- <check level="error" class="org.scalastyle.file.WhitespaceEndOfLineChecker" enabled="false"></check>
- <check level="error" class="org.scalastyle.scalariform.SpacesBeforePlusChecker" enabled="true"></check>
- <check level="error" class="org.scalastyle.file.FileLineLengthChecker" enabled="true">
-  <parameters>
-   <parameter name="maxLineLength"><![CDATA[100]]></parameter>
-   <parameter name="tabSize"><![CDATA[2]]></parameter>
-   <parameter name="ignoreImports">true</parameter>
-  </parameters>
- </check>
- <check level="error" class="org.scalastyle.scalariform.ClassNamesChecker" enabled="true">
-  <parameters>
-   <parameter name="regex"><![CDATA[[A-Z][A-Za-z]*]]></parameter>
-  </parameters>
- </check>
- <check level="error" class="org.scalastyle.scalariform.ObjectNamesChecker" enabled="true">
-  <parameters>
-   <parameter name="regex"><![CDATA[[A-Z][A-Za-z]*]]></parameter>
-  </parameters>
- </check>
- <check level="error" class="org.scalastyle.scalariform.PackageObjectNamesChecker" enabled="true">
-  <parameters>
-   <parameter name="regex"><![CDATA[^[a-z][A-Za-z]*$]]></parameter>
-  </parameters>
- </check>
- <check level="error" class="org.scalastyle.scalariform.EqualsHashCodeChecker" enabled="false"></check>
- <!-- <check level="error" class="org.scalastyle.scalariform.IllegalImportsChecker" enabled="true"> -->
- <!--  <parameters> -->
- <!--   <parameter name="illegalImports"><![CDATA[sun._,java.awt._]]></parameter> -->
- <!--  </parameters> -->
- <!-- </check> -->
- <check level="error" class="org.scalastyle.scalariform.ParameterNumberChecker" enabled="true">
-  <parameters>
-   <parameter name="maxParameters"><![CDATA[10]]></parameter>
-  </parameters>
- </check>
- <!-- <check level="error" class="org.scalastyle.scalariform.MagicNumberChecker" enabled="true"> -->
- <!--  <parameters> -->
- <!--   <parameter name="ignore"><![CDATA[-1,0,1,2,3]]></parameter> -->
- <!--  </parameters> -->
- <!-- </check> -->
- <check level="error" class="org.scalastyle.scalariform.NoWhitespaceBeforeLeftBracketChecker" enabled="false"></check>
- <check level="error" class="org.scalastyle.scalariform.NoWhitespaceAfterLeftBracketChecker" enabled="false"></check>
- <!-- <check level="error" class="org.scalastyle.scalariform.ReturnChecker" enabled="true"></check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.NullChecker" enabled="true"></check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.NoCloneChecker" enabled="true"></check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.NoFinalizeChecker" enabled="true"></check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.CovariantEqualsChecker" enabled="true"></check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.StructuralTypeChecker" enabled="true"></check> -->
- <!-- <check level="error" class="org.scalastyle.file.RegexChecker" enabled="true"> -->
- <!--  <parameters> -->
- <!--   <parameter name="regex"><![CDATA[println]]></parameter> -->
- <!--  </parameters> -->
- <!-- </check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.NumberOfTypesChecker" enabled="true"> -->
- <!--  <parameters> -->
- <!--   <parameter name="maxTypes"><![CDATA[30]]></parameter> -->
- <!--  </parameters> -->
- <!-- </check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.CyclomaticComplexityChecker" enabled="true"> -->
- <!--  <parameters> -->
- <!--   <parameter name="maximum"><![CDATA[10]]></parameter> -->
- <!--  </parameters> -->
- <!-- </check> -->
- <check level="error" class="org.scalastyle.scalariform.UppercaseLChecker" enabled="true"></check>
- <check level="error" class="org.scalastyle.scalariform.SimplifyBooleanExpressionChecker" enabled="false"></check>
- <check level="error" class="org.scalastyle.scalariform.IfBraceChecker" enabled="true">
-  <parameters>
-   <parameter name="singleLineAllowed"><![CDATA[true]]></parameter>
-   <parameter name="doubleLineAllowed"><![CDATA[true]]></parameter>
-  </parameters>
- </check>
- <!-- <check level="error" class="org.scalastyle.scalariform.MethodLengthChecker" enabled="true"> -->
- <!--  <parameters> -->
- <!--   <parameter name="maxLength"><![CDATA[50]]></parameter> -->
- <!--  </parameters> -->
- <!-- </check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.MethodNamesChecker" enabled="true"> -->
- <!--  <parameters> -->
- <!--   <parameter name="regex"><![CDATA[^[a-z][A-Za-z0-9]*$]]></parameter> -->
- <!--  </parameters> -->
- <!-- </check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.NumberOfMethodsInTypeChecker" enabled="true"> -->
- <!--  <parameters> -->
- <!--   <parameter name="maxMethods"><![CDATA[30]]></parameter> -->
- <!--  </parameters> -->
- <!-- </check> -->
- <!-- <check level="error" class="org.scalastyle.scalariform.PublicMethodsHaveTypeChecker" enabled="true"></check> -->
- <check level="error" class="org.scalastyle.file.NewLineAtEofChecker" enabled="true"></check>
- <check level="error" class="org.scalastyle.file.NoNewLineAtEofChecker" enabled="false"></check>
-</scalastyle>
diff --git a/r/pom.xml b/r/pom.xml
deleted file mode 100644
index 46340e4..0000000
--- a/r/pom.xml
+++ /dev/null
@@ -1,351 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~    http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <artifactId>zeppelin-interpreter-parent</artifactId>
-    <groupId>org.apache.zeppelin</groupId>
-    <version>0.9.0-SNAPSHOT</version>
-    <relativePath>../zeppelin-interpreter-parent/pom.xml</relativePath>
-  </parent>
-
-  <groupId>org.apache.zeppelin</groupId>
-  <artifactId>zeppelin-zrinterpreter_${scala.binary.version}</artifactId>
-  <packaging>jar</packaging>
-  <version>0.9.0-SNAPSHOT</version>
-  <name>Zeppelin: R Interpreter</name>
-  <description>R Interpreter for Zeppelin</description>
-  <url>https://zeppelin.apache.org</url>
-
-  <properties>
-    <script.extension>.sh</script.extension>
-    <path.separator>/</path.separator>
-    <!--library versions-->
-    <spark.version>1.4.1</spark.version>
-
-    <!--test library versions-->
-    <datanucleus.rdbms.version>3.2.9</datanucleus.rdbms.version>
-    <datanucleus.apijdo.version>3.2.6</datanucleus.apijdo.version>
-    <datanucleus.core.version>3.2.10</datanucleus.core.version>
-
-    <!--plugin versions-->
-    <plugin.shade.version>2.3</plugin.shade.version>
-    <plugin.scalatest.version>1.0</plugin.scalatest.version>
-  </properties>
-
-  <dependencies>
-
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>spark-interpreter</artifactId>
-      <version>${project.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-repl_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-sql_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-hive_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-library</artifactId>
-      <version>${scala.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <version>${scalatest.version}</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scalacheck</groupId>
-      <artifactId>scalacheck_${scala.binary.version}</artifactId>
-      <version>${scalacheck.version}</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>commons-codec</groupId>
-      <artifactId>commons-codec</artifactId>
-      <scope>compile</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.jsoup</groupId>
-      <artifactId>jsoup</artifactId>
-      <version>${jsoup.version}</version>
-      <scope>compile</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>datanucleus-core</artifactId>
-      <version>${datanucleus.core.version}</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>datanucleus-api-jdo</artifactId>
-      <version>${datanucleus.apijdo.version}</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>datanucleus-rdbms</artifactId>
-      <version>${datanucleus.rdbms.version}</version>
-      <scope>test</scope>
-    </dependency>
-
-  </dependencies>
-
-  <build>
-    <plugins>
-
-      <plugin>
-        <artifactId>maven-enforcer-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>enforce</id>
-            <phase>none</phase>
-          </execution>
-        </executions>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <forkCount>1</forkCount>
-          <reuseForks>false</reuseForks>
-          <argLine>-Xmx1024m -XX:MaxMetaspaceSize=512m</argLine>
-          <skipTests>true</skipTests>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest-maven-plugin</artifactId>
-        <configuration>
-          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <junitxml>.</junitxml>
-          <filereports>testoutput.txt</filereports>
-          <parallel>false</parallel>
-          <forkCount>1</forkCount>
-          <reuseForks>true</reuseForks>
-          <systemProperties>
-            <scala.usejavacp>true</scala.usejavacp>
-          </systemProperties>
-        </configuration>
-        <executions>
-          <execution>
-            <id>test</id>
-            <goals>
-              <goal>test</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <configuration>
-          <filters>
-            <filter>
-              <artifact>*:*</artifact>
-              <excludes>
-                <exclude>org/datanucleus/**</exclude>
-                <exclude>META-INF/*.SF</exclude>
-                <exclude>META-INF/*.DSA</exclude>
-                <exclude>META-INF/*.RSA</exclude>
-              </excludes>
-            </filter>
-          </filters>
-          <transformers>
-            <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
-            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
-              <resource>reference.conf</resource>
-            </transformer>
-          </transformers>
-        </configuration>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-
-      <!-- Deploy datanucleus jars to the interpreter/spark directory -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>copy</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/../../interpreter/spark</outputDirectory>
-              <overWriteReleases>false</overWriteReleases>
-              <overWriteSnapshots>false</overWriteSnapshots>
-              <overWriteIfNewer>true</overWriteIfNewer>
-              <artifactItems>
-                <artifactItem>
-                  <groupId>${project.groupId}</groupId>
-                  <artifactId>${project.artifactId}</artifactId>
-                  <version>${project.version}</version>
-                  <type>${project.packaging}</type>
-                </artifactItem>
-              </artifactItems>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-
-      <!-- Plugin to compile Scala code -->
-      <plugin>
-        <groupId>org.scala-tools</groupId>
-        <artifactId>maven-scala-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>compile</id>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-            <phase>compile</phase>
-          </execution>
-          <execution>
-            <id>test-compile</id>
-            <goals>
-              <goal>testCompile</goal>
-            </goals>
-            <phase>test-compile</phase>
-          </execution>
-          <execution>
-            <phase>process-resources</phase>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>exec-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>compile</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-          </execution>
-        </executions>
-        <configuration>
-          <executable>R${path.separator}install-dev${script.extension}</executable>
-        </configuration>
-      </plugin>
-      <plugin>
-        <artifactId>maven-clean-plugin</artifactId>
-        <configuration>
-          <filesets>
-            <fileset>
-              <directory>${project.build.directory}/../../R</directory>
-              <includes>
-                <include>**/lib/**</include>
-              </includes>
-            </fileset>
-            <fileset>
-              <directory>${project.build.directory}/../../interpreter/spark</directory>
-              <includes>
-                <include>**/zeppelin-zr*.jar</include>
-              </includes>
-            </fileset>
-          </filesets>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-
-  <profiles>
-    <profile>
-      <id>scala-2.10</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <properties>
-        <extra.source.dir>src/main/scala-2.10</extra.source.dir>
-        <extra.testsource.dir>src/test/scala-2.10</extra.testsource.dir>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>scala-2.11</id>
-      <properties>
-        <extra.source.dir>src/main/scala-2.11</extra.source.dir>
-        <extra.testsource.dir>src/test/scala/scala-2.11</extra.testsource.dir>
-      </properties>
-    </profile>
-  </profiles>
-</project>
diff --git a/r/src/main/java/org/apache/zeppelin/rinterpreter/KnitR.java b/r/src/main/java/org/apache/zeppelin/rinterpreter/KnitR.java
deleted file mode 100644
index ab29efe..0000000
--- a/r/src/main/java/org/apache/zeppelin/rinterpreter/KnitR.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.rinterpreter;
-
-import org.apache.zeppelin.interpreter.*;
-import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
-import org.apache.zeppelin.scheduler.Scheduler;
-
-import java.net.URL;
-import java.util.List;
-import java.util.Properties;
-
-/**
- * KnitR is a simple wrapper around KnitRInterpreter to handle that Zeppelin prefers
- * to load interpreters through classes defined in Java with static methods that run
- * when the class is loaded.
- *
- */
-public class KnitR extends Interpreter implements WrappedInterpreter {
-  KnitRInterpreter intp;
-
-  public KnitR(Properties properties, Boolean startSpark) {
-    super(properties);
-    intp = new KnitRInterpreter(properties, startSpark);
-  }
-  public KnitR(Properties properties) {
-    this(properties, true);
-  }
-
-  public KnitR() {
-    this(new Properties());
-  }
-
-  @Override
-  public void open() throws InterpreterException {
-    intp.open();
-  }
-
-  @Override
-  public void close() throws InterpreterException {
-    intp.close();
-  }
-
-  @Override
-  public InterpreterResult interpret(String s, InterpreterContext interpreterContext)
-      throws InterpreterException {
-    return intp.interpret(s, interpreterContext);
-  }
-
-  @Override
-  public void cancel(InterpreterContext interpreterContext) throws InterpreterException {
-    intp.cancel(interpreterContext);
-  }
-
-  @Override
-  public FormType getFormType() throws InterpreterException {
-    return intp.getFormType();
-  }
-
-  @Override
-  public int getProgress(InterpreterContext interpreterContext) throws InterpreterException {
-    return intp.getProgress(interpreterContext);
-  }
-
-  @Override
-  public List<InterpreterCompletion> completion(String s, int i,
-      InterpreterContext interpreterContext) throws InterpreterException {
-    List completion = intp.completion(s, i, interpreterContext);
-    return completion;
-  }
-
-  @Override
-  public Interpreter getInnerInterpreter() {
-    return intp;
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    return intp.getScheduler();
-  }
-
-  @Override
-  public void setProperties(Properties properties) {
-    super.setProperties(properties);
-    intp.setProperties(properties);
-  }
-
-  @Override
-  public Properties getProperties() {
-    return intp.getProperties();
-  }
-
-  @Override
-  public String getProperty(String key) {
-    return intp.getProperty(key);
-  }
-
-  @Override
-  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
-    super.setInterpreterGroup(interpreterGroup);
-    intp.setInterpreterGroup(interpreterGroup);
-  }
-
-  @Override
-  public InterpreterGroup getInterpreterGroup() {
-    return intp.getInterpreterGroup();
-  }
-
-  @Override
-  public void setClassloaderUrls(URL[] classloaderUrls) {
-    intp.setClassloaderUrls(classloaderUrls);
-  }
-
-  @Override
-  public URL[] getClassloaderUrls() {
-    return intp.getClassloaderUrls();
-  }
-}
diff --git a/r/src/main/java/org/apache/zeppelin/rinterpreter/RRepl.java b/r/src/main/java/org/apache/zeppelin/rinterpreter/RRepl.java
deleted file mode 100644
index bdf7dae..0000000
--- a/r/src/main/java/org/apache/zeppelin/rinterpreter/RRepl.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.rinterpreter;
-
-import org.apache.zeppelin.interpreter.*;
-import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
-import org.apache.zeppelin.scheduler.Scheduler;
-
-import java.net.URL;
-import java.util.List;
-import java.util.Properties;
-
-/**
- * RRepl is a simple wrapper around RReplInterpreter to handle that Zeppelin prefers
- * to load interpreters through classes defined in Java with static methods that run
- * when the class is loaded.
- *
- */
-public class RRepl extends Interpreter implements WrappedInterpreter {
-  RReplInterpreter intp;
-
-  public RRepl(Properties properties, Boolean startSpark) {
-    super(properties);
-    intp = new RReplInterpreter(properties, startSpark);
-  }
-  public RRepl(Properties properties) {
-    this(properties, true);
-  }
-
-  public RRepl() {
-    this(new Properties());
-  }
-
-  @Override
-  public void open() throws InterpreterException {
-    intp.open();
-  }
-
-  @Override
-  public void close() throws InterpreterException {
-    intp.close();
-  }
-
-  @Override
-  public InterpreterResult interpret(String s, InterpreterContext interpreterContext)
-      throws InterpreterException {
-    return intp.interpret(s, interpreterContext);
-  }
-
-  @Override
-  public void cancel(InterpreterContext interpreterContext) throws InterpreterException {
-    intp.cancel(interpreterContext);
-  }
-
-  @Override
-  public FormType getFormType() throws InterpreterException {
-    return intp.getFormType();
-  }
-
-  @Override
-  public int getProgress(InterpreterContext interpreterContext) throws InterpreterException {
-    return intp.getProgress(interpreterContext);
-  }
-
-  @Override
-  public List<InterpreterCompletion> completion(String s, int i,
-      InterpreterContext interpreterContext) throws InterpreterException {
-    List completion = intp.completion(s, i, interpreterContext);
-    return completion;
-  }
-
-  @Override
-  public Interpreter getInnerInterpreter() {
-    return intp;
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    return intp.getScheduler();
-  }
-
-  @Override
-  public void setProperties(Properties properties) {
-    super.setProperties(properties);
-    intp.setProperties(properties);
-  }
-
-  @Override
-  public Properties getProperties() {
-    return intp.getProperties();
-  }
-
-  @Override
-  public String getProperty(String key) {
-    return intp.getProperty(key);
-  }
-
-  @Override
-  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
-    super.setInterpreterGroup(interpreterGroup);
-    intp.setInterpreterGroup(interpreterGroup);
-  }
-
-  @Override
-  public InterpreterGroup getInterpreterGroup() {
-    return intp.getInterpreterGroup();
-  }
-
-  @Override
-  public void setClassloaderUrls(URL[] classloaderUrls) {
-    intp.setClassloaderUrls(classloaderUrls);
-  }
-
-  @Override
-  public URL[] getClassloaderUrls() {
-    return intp.getClassloaderUrls();
-  }
-}
diff --git a/r/src/main/java/org/apache/zeppelin/rinterpreter/RStatics.java b/r/src/main/java/org/apache/zeppelin/rinterpreter/RStatics.java
deleted file mode 100644
index 1ea35ce..0000000
--- a/r/src/main/java/org/apache/zeppelin/rinterpreter/RStatics.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
-The purpose of this class is to provide something for R to call through the backend
-to bootstrap.
- */
-
-package org.apache.zeppelin.rinterpreter;
-
-import org.apache.spark.SparkContext;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.SQLContext;
-import org.apache.zeppelin.spark.SparkZeppelinContext;
-
-/**
- * RStatics provides static class methods that can be accessed through the SparkR bridge
- *
- */
-public class RStatics {
-  private static SparkContext sc = null;
-  private static SparkZeppelinContext z = null;
-  private static SQLContext sql = null;
-  private static RContext rCon = null;
-
-  public static SparkContext setSC(SparkContext newSC) {
-    sc = newSC;
-    return sc;
-  }
-
-  public static SparkZeppelinContext setZ(SparkZeppelinContext newZ) {
-    z = newZ;
-    return z;
-  }
-
-  public static SQLContext setSQL(SQLContext newSQL) {
-    sql = newSQL;
-    return sql;
-  }
-
-  public static JavaSparkContext getJSC() {
-    return new JavaSparkContext(sc);
-  }
-
-  public static SparkContext getSC() {
-    return sc;
-  }
-
-  public static SQLContext getSQL() {
-    return sql;
-  }
-
-  public static Object getZ(String name) {
-    return z.get(name);
-  }
-
-  public static void putZ(String name, Object obj) {
-    z.put(name, obj);
-  }
-
-  public static RContext getRCon() {
-    return rCon;
-  }
-  public static RContext setrCon(RContext newrCon) {
-    rCon = newrCon;
-    return rCon;
-  }
-  public static Boolean testRDD(String name) {
-    Object x = z.get(name);
-    return (x instanceof org.apache.spark.api.java.JavaRDD);
-  }
-}
diff --git a/r/src/main/resources/interpreter-setting.json b/r/src/main/resources/interpreter-setting.json
deleted file mode 100644
index c5997a3..0000000
--- a/r/src/main/resources/interpreter-setting.json
+++ /dev/null
@@ -1,61 +0,0 @@
-[
-  {
-    "group": "spark",
-    "name": "r",
-    "className": "org.apache.zeppelin.rinterpreter.RRepl",
-    "properties": {
-      "rhadoop.cmd": {
-        "envName": "HADOOP_CMD",
-        "defaultValue": "",
-        "type": "textarea"
-      },
-      "rhadooop.streamingjar": {
-        "envName": "HADOOP_STREAMING",
-        "defaultValue": "",
-        "type": "textarea"
-      },
-      "rscala.debug": {
-        "envName": "RSCALA_DEBUG",
-        "defaultValue": false,
-        "type": "checkbox"
-      },
-      "rscala.timeout": {
-        "envName": "RSCALA_TIMEOUT",
-        "defaultValue": "60",
-        "type": "number"
-      }
-    },
-    "editor": {
-      "language": "r",
-      "editOnDblClick": false,
-      "completionKey": "TAB"
-    }
-  },
-  {
-    "group": "spark",
-    "name": "knitr",
-    "className": "org.apache.zeppelin.rinterpreter.RRepl",
-    "properties": {
-      "rhadoop.cmd": {
-        "envName": "HADOOP_CMD",
-        "defaultValue": "",
-        "type": "textarea"
-      },
-      "rhadooop.streamingjar": {
-        "envName": "HADOOP_STREAMING",
-        "defaultValue": "",
-        "type": "textarea"
-      },
-      "rscala.debug": {
-        "envName": "RSCALA_DEBUG",
-        "defaultValue": false,
-        "type": "checkbox"
-      },
-      "rscala.timeout": {
-        "envName": "RSCALA_TIMEOUT",
-        "defaultValue": "60",
-        "type": "number"
-      }
-    }
-  }
-]
diff --git a/r/src/main/scala/org/apache/spark/api/r/RBackendHelper.scala b/r/src/main/scala/org/apache/spark/api/r/RBackendHelper.scala
deleted file mode 100644
index 9c1eb38..0000000
--- a/r/src/main/scala/org/apache/spark/api/r/RBackendHelper.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
-With grattitude to Shivaram for advice regarding how to get SparkR talking to an existing SparkContext in Java
- */
-package org.apache.spark.api.r
-
-class RBackendHelper(val backend : RBackend) {
-
-
-
-  def close() : Unit = backend.close()
-
-
-  var port : Int = 0
-
-  def init() : Int = {
-    port = backend.init()
-    port
-  }
-
-  val backendThread : Thread = new Thread("SparkR backend") {
-    override def run() {
-      backend.run()
-    }
-  }
-
-  def start() : Thread = {
-    if (port == 0) throw new RuntimeException("BackendHelper must be initialized before starting")
-    if (!backendThread.isAlive) backendThread.start()
-    backendThread
-  }
-
-
-/*
-The sequence is:
-1.  Before initializing spark in R, after loading library, Backend goes up and starts listening.  (Note that its able to execute arbitrary methods!!!  We can use it for
-zeppelin context!!!)
-2.  Tell SparkR to make a connection to the backend, setting the EXISTING port to the one in backendhelper.
-3.  Track sparkR.init, but where it calls spark/R/pkg/R/sparkR.R calls org.apache.spark.api.r.RRDD.createSparkContext to get sc,
-which is then returned as a jobj link, instead call RBackendHelper.getSC
-  3a Actually the object returned right now is of type JavaSparkContext ?????  Need to understand this
-4.  SparkR for the other contexts calls related methods, org.apache.spark.sql.api.r.SQLUtils.createSQLContext and
-org.apache.spark.sql.hive.HiveContext is just made new, with the jobj reference assigned to an object.  We should track
-the same pattern as above.
-
-
- */
-}
-
-
-object RBackendHelper {
-
-/*
-This function creates a new SparkContext, but does not register it, based on whatever properties are provided.
-Its for testing purposes and should never be called
- */
-//  def buildSparkContext( props : Properties) : SparkContext = {
-//    val traversableProps : Traversable[(String, String)] = propertiesAsScalaMap(props)
-//    val conf = new SparkConf().setAll(traversableProps)
-//    conf.setIfMissing("spark.master", "local")
-//  conf.setIfMissing("spark.app.name", "ZeppelinRContext")
-//    conf.validateSettings()
-//    new SparkContext(conf)
-//  }
-
-  def apply() : RBackendHelper = new RBackendHelper(new RBackend())
-
-}
\ No newline at end of file
diff --git a/r/src/main/scala/org/apache/zeppelin/rinterpreter/KnitRInterpreter.scala b/r/src/main/scala/org/apache/zeppelin/rinterpreter/KnitRInterpreter.scala
deleted file mode 100644
index 64b1d26..0000000
--- a/r/src/main/scala/org/apache/zeppelin/rinterpreter/KnitRInterpreter.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.rinterpreter
-
-
-// TODO:  Capture the knitr progress bar
-
-import java.util._
-
-import org.apache.zeppelin.interpreter.InterpreterContext
-import org.apache.zeppelin.interpreter.InterpreterResult
-import org.apache.zeppelin.rinterpreter.rscala.RException
-
-
-class KnitRInterpreter(properties: Properties, startSpark : Boolean = true) extends RInterpreter(properties, startSpark) {
-  def this(properties : Properties) = {
-    this(properties, true)
-  }
-
-  override def open: Unit = {
-    logger.trace("Opening knitr")
-    rContext.synchronized {
-      super.open
-      logger.debug("Knitr open, initial commands")
-      rContext.testRPackage("knitr", true, true, "Without knitr, the knitr interpreter cannot run.")
-      rContext.eval(
-        """opts_knit$set(out.format = 'html',
-          |results='asis',
-          |progress = FALSE,
-          |self.contained = TRUE,
-          |verbose = FALSE,
-          |comment = NA,
-          |echo = FALSE,
-          |tidy = FALSE)
-          | """.stripMargin)
-    }
-    logger.info("KnitR:  Finished initial commands")
-  }
-
-  def interpret(st: String, context: InterpreterContext): InterpreterResult = try {
-    logger.trace("interpreting" + st)
-    // need to convert st into an array of Strings within R
-    val commandSt : Array[String] = st.split("\n")
-    val chunkOptions = commandSt.head
-    val chunkLine : String = s"```{r $chunkOptions}"
-    val chunk : Array[String] = Array(chunkLine) ++: commandSt.tail ++: Array("```")
-    val out: String = rContext.synchronized {
-      rContext.set(".zeppknitrinput", chunk)
-      rContext.eval(".knitout <- knit2html(text=.zeppknitrinput, envir = rzeppelin:::.zeppenv)")
-      rContext.getS0(".knitout")
-    }
-
-    new InterpreterResult(InterpreterResult.Code.SUCCESS,
-      InterpreterResult.Type.HTML,
-      RInterpreter.processHTML(out)
-    )
-  } catch {
-    case r: RException => r.getInterpreterResult(st)
-    case e: Exception => new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage())
-  }
-}
-
diff --git a/r/src/main/scala/org/apache/zeppelin/rinterpreter/RContext.scala b/r/src/main/scala/org/apache/zeppelin/rinterpreter/RContext.scala
deleted file mode 100644
index 4ad65cd..0000000
--- a/r/src/main/scala/org/apache/zeppelin/rinterpreter/RContext.scala
+++ /dev/null
@@ -1,320 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.rinterpreter
-
-import java.io._
-import java.nio.file.{Files, Paths}
-import java.util.Properties
-
-import org.apache.spark.SparkContext
-import org.apache.spark.api.r.RBackendHelper
-import org.apache.spark.sql.SQLContext
-import org.apache.zeppelin.interpreter._
-import org.apache.zeppelin.rinterpreter.rscala.RClient._
-import org.apache.zeppelin.rinterpreter.rscala._
-import org.apache.zeppelin.scheduler._
-import org.apache.zeppelin.spark.{SparkInterpreter, SparkZeppelinContext}
-import org.slf4j._
-
-import scala.collection.JavaConversions._
-
-// TODO:  Setup rmr, etc.
-// TODO:  Stress-test spark.  What happens on close?  Etc.
-
-private[rinterpreter] class RContext(private val sockets: ScalaSockets,
-                                     debug: Boolean) extends RClient(sockets.in, sockets.out, debug) {
-
-  private val logger: Logger = RContext.logger
-  lazy val getScheduler: Scheduler = SchedulerFactory.singleton().createOrGetFIFOScheduler(this.hashCode().toString)
-
-  val backend: RBackendHelper = RBackendHelper()
-  private var sc: Option[SparkContext] = None
-  private var sql: Option[SQLContext] = None
-  private var z: Option[SparkZeppelinContext] = None
-
-  val rPkgMatrix = collection.mutable.HashMap[String,Boolean]()
-
-  var isOpen: Boolean = false
-  private var isFresh : Boolean = true
-
-  private var property: Properties = null
-  private[rinterpreter] var sparkRStarted : Boolean = false
-
-  override def toString() : String = s"""${super.toString()}
-       |\t Open: $isOpen Fresh: $isFresh SparkStarted: $sparkRStarted
-       |\t Progress: $progress
-       |\t Sockets: ${sockets.toString()}
-     """.stripMargin
-
-  var progress: Int = 0
-
-  def getProgress: Int = {
-    return progress
-  }
-
-  def setProgress(i: Int) : Unit = {
-    progress = i % 100
-  }
-
-  def incrementProgress(i: Int) : Unit = {
-    progress = (progress + i) % 100
-  }
-
-  // handle properties this way so it can be a mutable object shared with the R Interpreters
-  def setProperty(properties: Properties): Unit = synchronized {
-    if (property == null) property = properties
-    else property.putAll(properties)
-  }
-
-  def open(startSpark : Option[SparkInterpreter]): Unit = synchronized {
-    if (isOpen && sparkRStarted) {
-      logger.trace("Reusing rContext.")
-      return
-    }
-    testRPackage("rzeppelin", fail = true, message =
-      "The rinterpreter cannot run without the rzeppelin package, which was included in your distribution.")
-    startSpark match {
-      case Some(x : SparkInterpreter) => {
-        sparkStartup(x)
-      }
-      case _ => logger.error("Could not find a SparkInterpreter")
-    }
-    isOpen = true
-  }
-  private def sparkStartup(startSpark : SparkInterpreter): Unit = try {
-    val sparkHome: String = System.getenv("SPARK_HOME") match {
-          case null => {
-            logger.error("SPARK_HOME is not set. The R Interpreter will start without Spark.")
-            return
-          }
-          case y => y
-        }
-    testRPackage("SparkR", fail = true, path = sparkHome)
-    if (startSpark.getSparkVersion() == null) throw new RuntimeException("No spark version")
-    sc = Some(startSpark.getSparkContext())
-    sql = Some(startSpark.getSQLContext())
-    z = Some(startSpark.getZeppelinContext())
-    logger.trace("Registered Spark Contexts")
-    backend.init()
-    backend.start()
-    if (!backend.backendThread.isAlive) throw new RuntimeException("SparkR could not startup because the Backend Thread is not alive")
-    logger.trace("Started Spark Backend")
-    eval( s"""SparkR:::connectBackend("localhost", ${backend.port})""")
-    logger.trace("SparkR backend connected")
-    initializeSparkR(sc.get, sql.get, z.get)
-    logger.info("Initialized SparkR")
-    sparkRStarted = true
-  } catch {
-    case e: Exception => throw new RuntimeException("""
-      Could not connect R to Spark.  If the stack trace is not clear,
-    check whether SPARK_HOME is set properly.""", e)
-  }
-
-  private def initializeSparkR(sc : SparkContext, sql : SQLContext, z : SparkZeppelinContext) : Unit = synchronized {
-
-    logger.trace("Getting a handle to the JavaSparkContext")
-
-    eval("assign(\".scStartTime\", as.integer(Sys.time()), envir = SparkR:::.sparkREnv)")
-    RStatics.setSC(sc)
-    eval(
-      """
-        |assign(
-        |".sparkRjsc",
-        |SparkR:::callJStatic("org.apache.zeppelin.rinterpreter.RStatics",
-        | "getJSC"),
-        | envir = SparkR:::.sparkREnv)""".stripMargin)
-
-    eval("assign(\"sc\", get(\".sparkRjsc\", envir = SparkR:::.sparkREnv), envir=.GlobalEnv)")
-
-    logger.trace("Established SparkR Context")
-
-    val sqlEnvName = sql match {
-      case null => throw new RuntimeException("Tried to initialize SparkR without setting a SQLContext")
-      case x : org.apache.spark.sql.hive.HiveContext => ".sparkRHivesc"
-      case x : SQLContext => ".sparkRSQLsc"
-    }
-    RStatics.setSQL(sql)
-    eval(
-      s"""
-        |assign(
-        |"${sqlEnvName}",
-        |SparkR:::callJStatic("org.apache.zeppelin.rinterpreter.RStatics",
-        | "getSQL"),
-        | envir = SparkR:::.sparkREnv)""".stripMargin)
-    eval(
-      s"""
-         |assign("sqlContext",
-         |get("$sqlEnvName",
-         |envir = SparkR:::.sparkREnv),
-         |envir = .GlobalEnv)
-       """.stripMargin)
-
-    logger.trace("Proving spark")
-    val proof = evalS1("names(SparkR:::.sparkREnv)")
-    logger.info("Proof of spark is : " + proof.mkString)
-
-    RStatics.setZ(z)
-
-    RStatics.setrCon(this)
-    eval(
-      s"""
-         |assign(".rContext",
-         |  SparkR:::callJStatic("org.apache.zeppelin.rinterpreter.RStatics",
-         | "getRCon"),
-         | envir = .GlobalEnv)
-     """.stripMargin
-    )
-  }
-
-  def close(): Unit = synchronized {
-    if (isOpen) {
-      if (sparkRStarted) {
-        try {
-          eval("SparkR:::sparkR.stop()")
-        } catch {
-          case e: RException => {}
-          case e: Exception => logger.error("Error closing SparkR", e)
-        }
-      }
-      try {
-        backend.close
-        backend.backendThread.stop()
-      } catch {
-        case e: Exception => logger.error("Error closing RContext ", e)
-      }
-      try {
-        exit()
-      } catch {
-        case e: Exception => logger.error("Shutdown error", e)
-      }
-    }
-    isOpen = false
-  }
-
-
-  private[rinterpreter] def testRPackage(pack: String,
-                                         fail: Boolean = false,
-                                         license: Boolean = false,
-                                         message: String = "",
-                                          path : String = ""): Boolean = synchronized {
-
-
-    rPkgMatrix.get(pack) match {
-      case Some(x: Boolean) => return x
-      case None => {}
-    }
-
-    evalB0( s"""require('$pack',quietly=TRUE, lib.loc="$path/R/lib/")""") match {
-      case true => {
-        rPkgMatrix.put(pack, true)
-        return (true)
-      }
-      case false => {
-        evalB0(s"require('$pack', quietly=TRUE)") match {
-          case true => {
-            rPkgMatrix.put(pack, true)
-            return true
-          }
-          case false => {
-            rPkgMatrix.put(pack, false)
-            val failMessage =
-              s"""The $pack package could not be loaded. """ + {
-                if (license) "We cannot install it for you because it is published under the GPL3 license."
-                else ""
-              } + message
-            logger.error(failMessage)
-            if (fail) throw new RException(failMessage)
-            return (false)
-          }
-        }
-      }
-    }
-  }
-
-  logger.info("RContext Finished Starting")
-}
-
-object RContext {
-  val logger: Logger = LoggerFactory.getLogger(getClass)
-
-  logger.trace("Inside the RContext Object")
-  private val contextMap : collection.mutable.HashMap[String, RContext] = collection.mutable.HashMap[String,RContext]()
-
-  // This function is here to work around inconsistencies in the SparkInterpreter startup sequence
-  // that caused testing issues
-  private[rinterpreter] def resetRcon() : Boolean = synchronized {
-    contextMap foreach((con) => {
-      con._2.close()
-      if (con._2.isOpen) throw new RuntimeException("Failed to close an existing RContext")
-      contextMap.remove(con._1)
-    })
-    return true
-  }
-
-  def apply( property: Properties, id : String): RContext = synchronized {
-        contextMap.get(id) match {
-          case Some(x : RContext) if x.isFresh || x.isOpen => return(x)
-          case Some(x : RContext) => resetRcon()
-          case _ => {}
-        }
-        val debug: Boolean = property.getProperty("rscala.debug", "false").toBoolean
-        val timeout: Int = property.getProperty("rscala.timeout", "60").toInt
-        import scala.sys.process._
-        logger.trace("Creating processIO")
-        var cmd: PrintWriter = null
-        val command = RClient.defaultRCmd +: RClient.defaultArguments
-        val processCmd = Process(command)
-
-        val processIO = new ProcessIO(
-          o => {
-            cmd = new PrintWriter(o)
-          },
-          reader("STDOUT DEBUG: "),
-          reader("STDERR DEBUG: "),
-          true
-        )
-        val portsFile = File.createTempFile("rscala-", "")
-        val processInstance = processCmd.run(processIO)
-        // Find rzeppelin
-        val libpath : String = if (Files.exists(Paths.get("R/lib"))) "R/lib"
-        else if (Files.exists(Paths.get("../R/lib"))) "../R/lib"
-        else throw new RuntimeException("Could not find rzeppelin - it must be in either R/lib or ../R/lib")
-        val snippet =
-          s"""
-library(lib.loc="$libpath", rzeppelin)
-rzeppelin:::rServe(rzeppelin:::newSockets('${portsFile.getAbsolutePath.replaceAll(File.separator, "/")}',debug=${if (debug) "TRUE" else "FALSE"},timeout=${timeout}))
-q(save='no')"""
-        while (cmd == null) Thread.sleep(100)
-        cmd.println(snippet)
-        cmd.flush()
-        val sockets = RClient.makeSockets(portsFile.getAbsolutePath)
-        sockets.out.writeInt(RClient.Protocol.OK)
-        sockets.out.flush()
-        val packVersion = RClient.readString(sockets.in)
-        if (packVersion != org.apache.zeppelin.rinterpreter.rscala.Version) {
-          logger.warn("Connection to R started but versions don't match " + packVersion + " " + org.apache.zeppelin.rinterpreter.rscala.Version)
-        } else {
-          logger.trace("Connected to a new R Session")
-        }
-        val context = new RContext(sockets, debug)
-        context.setProperty(property)
-        contextMap.put(id, context)
-        context
-  }
-}
-
diff --git a/r/src/main/scala/org/apache/zeppelin/rinterpreter/RInterpreter.scala b/r/src/main/scala/org/apache/zeppelin/rinterpreter/RInterpreter.scala
deleted file mode 100644
index 935d526..0000000
--- a/r/src/main/scala/org/apache/zeppelin/rinterpreter/RInterpreter.scala
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.rinterpreter
-
-import java.io.{BufferedInputStream, File, FileInputStream}
-import java.nio.file.{Files, Paths}
-import java.util._
-
-import org.apache.commons.codec.binary.{Base64, StringUtils}
-import org.apache.zeppelin.interpreter.Interpreter.FormType
-import org.apache.zeppelin.interpreter.{InterpreterContext, _}
-import org.apache.zeppelin.scheduler.Scheduler
-import org.apache.zeppelin.spark.SparkInterpreter
-import org.jsoup.Jsoup
-import org.jsoup.nodes._
-import org.jsoup.select.Elements
-import org.slf4j.{Logger, LoggerFactory}
-
-import scala.collection.JavaConversions._
-import scala.io.Source
-
-abstract class RInterpreter(properties : Properties, startSpark : Boolean = true) extends Interpreter (properties) {
-
-  protected val logger: Logger = RInterpreter.logger
-  logger.trace("Initialising an RInterpreter of class " + this.getClass.getName)
-
-  def getrContext: RContext = rContext
-
-  protected lazy val rContext : RContext = synchronized{ RContext(properties, this.getInterpreterGroup().getId()) }
-
-  def open: Unit = rContext.synchronized {
-    logger.trace("RInterpreter opening")
-    // We leave this as an Option[] because the pattern of nesting SparkInterpreter inside of wrapper interpreters
-    // has changed several times, and this allows us to fail more gracefully and handle those changes in one place.
-    val intp : Option[SparkInterpreter] =  getSparkInterpreter()
-    rContext.open(intp)
-    rContext.testRPackage("htmltools", message =
-      """You can continue
-        | without it, but some interactive visualizations will fail.
-        | You can install it from cran."""")
-    rContext.testRPackage("repr", license = true, message =
-      """You can continue
-        | without it, but some forms of output from the REPL may not appear properly."""")
-    rContext.testRPackage("base64enc", license = true, message =
-      """You can continue
-        | without it, but the REPL may not show images properly.""")
-    rContext.testRPackage("evaluate", license = false, message =
-      """
-        |The REPL needs this to run.  It can be installed from CRAN
-        | Thanks to Hadley Wickham and Yihui Xie for graciously making evaluate available under an Apache-compatible
-        | license so it can be used with this project.""".stripMargin)
-  }
-
-  def getSparkInterpreter() : Option[SparkInterpreter] = {
-    val sparkInterpreter = getInterpreterInTheSameSessionByClassName(classOf[SparkInterpreter])
-    if (sparkInterpreter == null) {
-      None
-    } else {
-      Some(sparkInterpreter)
-    }
-  }
-
-  def getSparkInterpreter(p1 : Interpreter) : Option[SparkInterpreter] = p1 match {
-    case s : SparkInterpreter => Some[SparkInterpreter](s)
-    case lzy : LazyOpenInterpreter => {
-      val p = lzy.getInnerInterpreter
-      lzy.open()
-      return getSparkInterpreter(p)
-    }
-    case w : WrappedInterpreter => return getSparkInterpreter(w.getInnerInterpreter)
-    case _ => None
-  }
-
-  def close: Unit = {
-    rContext.close
-  }
-
-  def getProgress(context :InterpreterContext): Int  = rContext.getProgress
-
-  def cancel(context:InterpreterContext) : Unit = {}
-
-  def getFormType: FormType = {
-    return FormType.NONE
-  }
-
-  override def getScheduler : Scheduler = rContext.getScheduler
-
-  // TODO:  completion is disabled because it could not be tested with current Zeppelin code
-  /*def completion(buf :String,cursor : Int) : List[String] = Array[String]("").toList
-
-  private[rinterpreter] def hiddenCompletion(buf :String,cursor : Int) : List[String] =
-    rContext.evalS1(s"""
-       |rzeppelin:::.z.completion("$buf", $cursor)
-     """.stripMargin).toList*/
-}
-
-object RInterpreter {
-
-  private val logger: Logger = LoggerFactory.getLogger(getClass)
-  logger.trace("logging inside the RInterpreter singleton")
-
-  // Some R interactive visualization packages insist on producing HTML that refers to javascript
-  // or css by file path.  These functions are intended to load those files and embed them into the
-  // HTML as Base64 encoded DataURIs.
-  //FIXME These don't error but may not yet properly be converting script links
-  def scriptToBase(doc : Element, testAttr : String, tag : String, mime : String): Unit = {
-    val elems : Elements = doc.getElementsByTag(tag)
-    elems.filter( (e : Element) => {
-      e.attributes().hasKey(testAttr) && e.attr(testAttr) != "" && e.attr(testAttr).slice(0,1) == "/"
-    }).foreach(scriptToBase(_, testAttr, mime))
-  }
-
-  def scriptToBase(node : Element, field : String, mime : String) : Unit = node.attr(field) match {
-    case x if Files.exists(Paths.get(x)) => node.attr(field, dataURI(x, mime))
-    case x if x.slice(0,4) == "http" => {}
-    case x if x.contains("ajax") => {}
-    case x if x.contains("googleapis") => {}
-    case x if x.slice(0,2) == "//" => node.attr(field, "http:" + x)
-    case _ => {}
-  }
-
-  def dataURI(file : String, mime : String) : String = {
-    val fp = new File(file)
-    val fdata = new Array[Byte](fp.length().toInt)
-    val fin = new BufferedInputStream(new FileInputStream(fp))
-    try {
-      fin.read(fdata)
-    } finally {
-      fin.close()
-    }
-    s"""data:${mime};base64,""" + StringUtils.newStringUtf8(Base64.encodeBase64(fdata, false))
-  }
-
-  // The purpose here is to deal with knitr producing HTML with script and css tags outside the <body>
-  def processHTML(input: Array[String]): String = processHTML(input.mkString("\n"))
-
-  def processHTML(input: String) : String = {
-		val doc : Document = Jsoup.parse(input)
-    processHTML(doc)
-	}
-
-	private def processHTML(doc : Document) : String = {
-    val bod : Element = doc.body()
-    val head : Element = doc.head()
-    // Try to ignore the knitr script that breaks zeppelin display
-		head.getElementsByTag("script").reverseIterator.foreach(bod.prependChild(_))
-    // Only get css from head if it links to a file
-    head.getElementsByTag("link").foreach(bod.prependChild(_))
-    scriptToBase(bod, "href", "link", "text/css")
-    scriptToBase(bod, "src", "script", "text/javascript")
-    bod.html()
-	}
-}
diff --git a/r/src/main/scala/org/apache/zeppelin/rinterpreter/RReplInterpreter.scala b/r/src/main/scala/org/apache/zeppelin/rinterpreter/RReplInterpreter.scala
deleted file mode 100644
index 013ccd8..0000000
--- a/r/src/main/scala/org/apache/zeppelin/rinterpreter/RReplInterpreter.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.rinterpreter
-
-
-// TODO:  Option for setting size of output images
-
-import java.util._
-
-import org.apache.zeppelin.interpreter.InterpreterContext
-import org.apache.zeppelin.interpreter.InterpreterResult
-import org.apache.zeppelin.rinterpreter.rscala.RException
-
-class RReplInterpreter(properties: Properties, startSpark : Boolean = true) extends RInterpreter(properties, startSpark) {
-
- // protected val rContext : RContext = RContext(properties)
-
-  def this(properties : Properties) = {
-    this(properties, true)
-  }
-  private var firstCell : Boolean = true
-  def interpret(st: String, context: InterpreterContext): InterpreterResult = {
-    rContext.synchronized {
-      try {
-        import scala.collection.immutable._
-        logger.info("intrpreting " + st)
-        rContext.set(".zreplin", st.split("\n"))
-        rContext.eval(".zreplout <- rzeppelin:::.z.valuate(.zreplin)")
-
-        val reslength: Int = rContext.evalI0("length(.zreplout)")
-        logger.debug("Length of evaluate result is " + reslength)
-        var gotError: Boolean = false
-        val result: String = List.range(1, reslength + 1).map((i: Int) => {
-          rContext.evalS1(s"class(.zreplout[[${i}]])") match {
-            case x: Array[String] if x contains ("recordedplot") => {
-              if (!rContext.testRPackage("repr", fail = false)) return new InterpreterResult(InterpreterResult.Code.ERROR,
-                InterpreterResult.Type.TEXT,
-                "Displaying images through the R REPL requires the repr package, which is not installed.")
-              val image: String = rContext.evalS0(s"base64enc:::base64encode(repr:::repr_jpg(.zreplout[[${i}]]))")
-              return new InterpreterResult(InterpreterResult.Code.SUCCESS,
-                InterpreterResult.Type.IMG, image)
-            }
-            //TODO: If the html contains a link to a file, transform it to a DataURI.  This is necessary for htmlwidgets
-            case x: Array[String] if x contains ("html") => {
-              val html: String = RInterpreter.processHTML(rContext.evalS0(s"rzeppelin:::.z.repr(.zreplout[[${i}]])"))
-              return new InterpreterResult(InterpreterResult.Code.SUCCESS,
-                InterpreterResult.Type.HTML, html)
-            }
-            case x: Array[String] if x contains "data.frame" => {
-              val table: Array[String] = rContext.evalS1( s"""rzeppelin:::.z.table(.zreplout[[${i}]])""")
-              return new InterpreterResult(InterpreterResult.Code.SUCCESS,
-                InterpreterResult.Type.TABLE,
-                table.mkString(sep = "\n"))
-            }
-            case x: Array[String] if x contains "source" => rContext.evalS0(s".zreplout[[${i}]]" + "$src")
-            case x: Array[String] if x contains "character" => rContext.evalS0(s".zreplout[[${i}]]")
-            case x: Array[String] if x contains "packageStartupMessage" => if (firstCell) {""} else {
-              firstCell = true
-              "Package Startup Message: " + rContext.evalS1(s"rzeppelin:::.z.repr(.zreplout[[${i}]])").mkString("\n")
-            }
-            case x: Array[String] if x contains "simpleError" => {
-              gotError = true
-              val error = rContext.evalS1(s"rzeppelin:::.z.repr(.zreplout[[${i}]])").mkString("\n")
-              logger.error(error)
-              error
-            }
-            case _ => rContext.evalS1(s"rzeppelin:::.z.repr(.zreplout[[${i}]])").mkString("\n")
-          }
-        }).mkString("\n\n")
-        return new InterpreterResult({
-          if (!gotError) InterpreterResult.Code.SUCCESS
-          else InterpreterResult.Code.ERROR
-        }, result)
-      } catch {
-        case re: RException => return re.getInterpreterResult(st)
-        case e: Exception => {
-          logger.error("Error interpreting " + st, e)
-          return new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage() + e.getStackTrace)
-        }
-      }
-    }
-  }
-}
diff --git a/r/src/main/scala/org/apache/zeppelin/rinterpreter/package.scala b/r/src/main/scala/org/apache/zeppelin/rinterpreter/package.scala
deleted file mode 100644
index d354107..0000000
--- a/r/src/main/scala/org/apache/zeppelin/rinterpreter/package.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin
-
-// TODO: Keeping interpreter out of spark interpreter group for now, until the context sharing code is developed
-// TEST: rmr2
-// TODO: Link getProgress to plyr (and knitr progress) if possible
-// TODO: Forms?
-// TODO: Completion?  Currently commented-out
-// TODO: It would be nice if the RReplInterpreter output svg instead of jpg, or intelligently selected, at a minimum
-// TODO: Some kind of proxy may be necessary for shiny and widgets see http://blog.dominodatalab.com/interactive-dashboards-with-knitr-and-html-widgets/
-
-package object rinterpreter {
-}
diff --git a/r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/Package.scala b/r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/Package.scala
deleted file mode 100644
index 4028dd5..0000000
--- a/r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/Package.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-package org.apache.zeppelin.rinterpreter
-/*
-Copyright (c) 2013-2015, David B. Dahl, Brigham Young University
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    Redistributions of source code must retain the above copyright
-    notice, this list of conditions and the following disclaimer.
-
-    Redistributions in binary form must reproduce the above copyright
-    notice, this list of conditions and the following disclaimer in
-    the documentation and/or other materials provided with the
-    distribution.
-
-    Neither the name of the <ORGANIZATION> nor the names of its
-    contributors may be used to endorse or promote products derived
-    from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-package object rscala {
-
-  val Version = "0.1.0"
-
-  val Date = "2015-05-15"
-
-}
diff --git a/r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/RClient.scala b/r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/RClient.scala
deleted file mode 100644
index b73524e..0000000
--- a/r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/RClient.scala
+++ /dev/null
@@ -1,527 +0,0 @@
-/*
-Copyright (c) 2013-2015, David B. Dahl, Brigham Young University
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    Redistributions of source code must retain the above copyright
-    notice, this list of conditions and the following disclaimer.
-
-    Redistributions in binary form must reproduce the above copyright
-    notice, this list of conditions and the following disclaimer in
-    the documentation and/or other materials provided with the
-    distribution.
-
-    Neither the name of the <ORGANIZATION> nor the names of its
-    contributors may be used to endorse or promote products derived
-    from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.apache.zeppelin.rinterpreter.rscala
-
-// TODO:  Add libdir to constructor
-
-import java.io._
-import java.net.{InetAddress, ServerSocket}
-
-import org.slf4j.{Logger, LoggerFactory}
-
-import scala.language.dynamics
-
-class RClient (private val in: DataInputStream,
-               private val out: DataOutputStream,
-               val debug: Boolean = true) extends Dynamic {
-  var damagedState : Boolean = false
-  private val logger: Logger = LoggerFactory.getLogger(getClass)
-
-  case class RObjectRef(val reference : String)  {
-    override def toString() = ".$"+reference
-  }
-
-  /** __For rscala developers only__: Sets whether debugging output should be displayed. */
-  def debug_=(v: Boolean) = {
-    if ( v != debug ) {
-      if ( debug ) logger.debug("Sending DEBUG request.")
-      out.writeInt(RClient.Protocol.DEBUG)
-      out.writeInt(if ( v ) 1 else 0)
-      out.flush()
-    }
-  }
-
-  def exit() = {
-    logger.debug("Sending EXIT request.")
-    out.writeInt(RClient.Protocol.EXIT)
-    out.flush()
-  }
-
-  def eval(snippet: String, evalOnly: Boolean = true): Any = try {
-    if (damagedState) throw new RException("Connection to R already damaged")
-    logger.debug("Sending EVAL request.")
-    out.writeInt(RClient.Protocol.EVAL)
-    RClient.writeString(out,snippet)
-    out.flush()
-    val status = in.readInt()
-    val output = RClient.readString(in)
-    if ( output != "" ) {
-      logger.error("R Error " + snippet + " " + output)
-      throw new RException(snippet, output)
-    }
-    if ( status != RClient.Protocol.OK ) throw new RException(snippet, output, "Error in R evaluation.")
-    if ( evalOnly ) null else get(".rzeppelin.last.value")._1
-  } catch {
-    case e : java.net.SocketException => {
-      logger.error("Connection to R appears to have shut down" + e)
-      damagedState = true
-    }
-  }
-
-  def evalI0(snippet: String) = { eval(snippet,true); getI0(".rzeppelin.last.value") }
-
-  def evalB0(snippet: String) = { eval(snippet,true); getB0(".rzeppelin.last.value") }
-
-  def evalS0(snippet: String) = { eval(snippet,true); getS0(".rzeppelin.last.value") }
-
-  def evalI1(snippet: String) = { eval(snippet,true); getI1(".rzeppelin.last.value") }
-
-  def evalB1(snippet: String) = { eval(snippet,true); getB1(".rzeppelin.last.value") }
-
-  def evalS1(snippet: String) = { eval(snippet,true); getS1(".rzeppelin.last.value") }
-
-  def evalR( snippet: String) = { eval(snippet,true); getR( ".rzeppelin.last.value") }
-
-  def set(identifier: String, value: Any): Unit = set(identifier,value,"",true)
-
-  def set(identifier: String, value: Any, index: String = "", singleBrackets: Boolean = true): Unit = {
-    if (damagedState) throw new RException("Connection to R already damaged")
-    val v = value
-    if ( index == "" ) out.writeInt(RClient.Protocol.SET)
-    else if ( singleBrackets ) {
-      out.writeInt(RClient.Protocol.SET_SINGLE)
-      RClient.writeString(out,index)
-    } else {
-      out.writeInt(RClient.Protocol.SET_DOUBLE)
-      RClient.writeString(out,index)
-    }
-    RClient.writeString(out,identifier)
-    if ( v == null || v.isInstanceOf[Unit] ) {
-      logger.debug("... which is null")
-      out.writeInt(RClient.Protocol.NULLTYPE)
-      out.flush()
-      if ( index != "" ) {
-        val status = in.readInt()
-        if ( status != RClient.Protocol.OK ) {
-          val output = RClient.readString(in)
-          if ( output != "" ) {
-            logger.error("R error setting " + output)
-            throw new RException(identifier + value.toString(), output, "Error setting")
-          }
-          throw new RException("Error in R evaluation. Set " + identifier + " to " + value.toString())
-        }
-      }
-      return
-    }
-    val c = v.getClass
-    logger.debug("... whose class is: "+c)
-    logger.debug("... and whose value is: "+v)
-    if ( c.isArray ) {
-      c.getName match {
-        case "[I" =>
-          val vv = v.asInstanceOf[Array[Int]]
-          out.writeInt(RClient.Protocol.VECTOR)
-          out.writeInt(vv.length)
-          out.writeInt(RClient.Protocol.INTEGER)
-          for ( i <- 0 until vv.length ) out.writeInt(vv(i))
-        case "[Z" =>
-          val vv = v.asInstanceOf[Array[Boolean]]
-          out.writeInt(RClient.Protocol.VECTOR)
-          out.writeInt(vv.length)
-          out.writeInt(RClient.Protocol.BOOLEAN)
-          for ( i <- 0 until vv.length ) out.writeInt(if ( vv(i) ) 1 else 0)
-        case "[Ljava.lang.String;" =>
-          val vv = v.asInstanceOf[Array[String]]
-          out.writeInt(RClient.Protocol.VECTOR)
-          out.writeInt(vv.length)
-          out.writeInt(RClient.Protocol.STRING)
-          for ( i <- 0 until vv.length ) RClient.writeString(out,vv(i))
-        case _ =>
-          throw new RException("Unsupported array type: "+c.getName)
-      }
-    } else {
-      c.getName match {
-        case "java.lang.Integer" =>
-          out.writeInt(RClient.Protocol.ATOMIC)
-          out.writeInt(RClient.Protocol.INTEGER)
-          out.writeInt(v.asInstanceOf[Int])
-        case "java.lang.Boolean" =>
-          out.writeInt(RClient.Protocol.ATOMIC)
-          out.writeInt(RClient.Protocol.BOOLEAN)
-          out.writeInt(if (v.asInstanceOf[Boolean]) 1 else 0)
-        case "java.lang.String" =>
-          out.writeInt(RClient.Protocol.ATOMIC)
-          out.writeInt(RClient.Protocol.STRING)
-          RClient.writeString(out,v.asInstanceOf[String])
-        case _ =>
-          throw new RException("Unsupported non-array type: "+c.getName)
-      }
-    }
-    out.flush()
-    if ( index != "" ) {
-      val status = in.readInt()
-      if ( status != RClient.Protocol.OK ) {
-        val output = RClient.readString(in)
-        if ( output != "" ) throw new RException(identifier + value.toString(), output, "Error setting")
-        throw new RException("Error in R evaluation.")
-      }
-    }
-  }
-
-  def get(identifier: String, asReference: Boolean = false): (Any,String) = {
-    logger.debug("Getting: "+identifier)
-    out.writeInt(if ( asReference ) RClient.Protocol.GET_REFERENCE else RClient.Protocol.GET)
-    RClient.writeString(out,identifier)
-    out.flush()
-    if ( asReference ) {
-      val r = in.readInt() match {
-        case RClient.Protocol.REFERENCE => (RObjectRef(RClient.readString(in)),"RObject")
-        case RClient.Protocol.UNDEFINED_IDENTIFIER =>
-          throw new RException("Undefined identifier")
-      }
-      return r
-    }
-    in.readInt match {
-      case RClient.Protocol.NULLTYPE =>
-        logger.debug("Getting null.")
-        (null,"Null")
-      case RClient.Protocol.ATOMIC =>
-        logger.debug("Getting atomic.")
-        in.readInt() match {
-          case RClient.Protocol.INTEGER => (in.readInt(),"Int")
-          case RClient.Protocol.DOUBLE => (in.readDouble(),"Double")
-          case RClient.Protocol.BOOLEAN => (( in.readInt() != 0 ),"Boolean")
-          case RClient.Protocol.STRING => (RClient.readString(in),"String")
-          case _ => throw new RException("Protocol error")
-        }
-      case RClient.Protocol.VECTOR =>
-        logger.debug("Getting vector...")
-        val length = in.readInt()
-        logger.debug("... of length: "+length)
-        in.readInt() match {
-          case RClient.Protocol.INTEGER => (Array.fill(length) { in.readInt() },"Array[Int]")
-          case RClient.Protocol.DOUBLE => (Array.fill(length) { in.readDouble() },"Array[Double]")
-          case RClient.Protocol.BOOLEAN => (Array.fill(length) { ( in.readInt() != 0 ) },"Array[Boolean]")
-          case RClient.Protocol.STRING => (Array.fill(length) { RClient.readString(in) },"Array[String]")
-          case _ => throw new RException("Protocol error")
-        }
-      case RClient.Protocol.MATRIX =>
-        logger.debug("Getting matrix...")
-        val nrow = in.readInt()
-        val ncol = in.readInt()
-        logger.debug("... of dimensions: "+nrow+","+ncol)
-        in.readInt() match {
-          case RClient.Protocol.INTEGER => (Array.fill(nrow) { Array.fill(ncol) { in.readInt() } },"Array[Array[Int]]")
-          case RClient.Protocol.DOUBLE => (Array.fill(nrow) { Array.fill(ncol) { in.readDouble() } },"Array[Array[Double]]")
-          case RClient.Protocol.BOOLEAN => (Array.fill(nrow) { Array.fill(ncol) { ( in.readInt() != 0 ) } },"Array[Array[Boolean]]")
-          case RClient.Protocol.STRING => (Array.fill(nrow) { Array.fill(ncol) { RClient.readString(in) } },"Array[Array[String]]")
-          case _ => throw new RException("Protocol error")
-        }
-      case RClient.Protocol.UNDEFINED_IDENTIFIER => throw new RException("Undefined identifier")
-      case RClient.Protocol.UNSUPPORTED_STRUCTURE => throw new RException("Unsupported data type")
-      case _ => throw new RException("Protocol error")
-    }
-  }
-
-  def getI0(identifier: String): Int = get(identifier) match {
-    case (a,"Int") => a.asInstanceOf[Int]
-    case (a,"Double") => a.asInstanceOf[Double].toInt
-    case (a,"Boolean") => if (a.asInstanceOf[Boolean]) 1 else 0
-    case (a,"String") => a.asInstanceOf[String].toInt
-    case (a,"Array[Int]") => a.asInstanceOf[Array[Int]](0)
-    case (a,"Array[Double]") => a.asInstanceOf[Array[Double]](0).toInt
-    case (a,"Array[Boolean]") => if ( a.asInstanceOf[Array[Boolean]](0) ) 1 else 0
-    case (a,"Array[String]") => a.asInstanceOf[Array[String]](0).toInt
-    case (_,tp) => throw new RException(s"Unable to cast ${tp} to Int")
-  }
-
-  def getD0(identifier: String): Double = get(identifier) match {
-    case (a,"Int") => a.asInstanceOf[Int].toDouble
-    case (a,"Double") => a.asInstanceOf[Double]
-    case (a,"Boolean") => if (a.asInstanceOf[Boolean]) 1.0 else 0.0
-    case (a,"String") => a.asInstanceOf[String].toDouble
-    case (a,"Array[Int]") => a.asInstanceOf[Array[Int]](0).toDouble
-    case (a,"Array[Double]") => a.asInstanceOf[Array[Double]](0)
-    case (a,"Array[Boolean]") => if ( a.asInstanceOf[Array[Boolean]](0) ) 1.0 else 0.0
-    case (a,"Array[String]") => a.asInstanceOf[Array[String]](0).toDouble
-    case (_,tp) => throw new RException(s"Unable to cast ${tp} to Double")
-  }
-
-  def getB0(identifier: String): Boolean = get(identifier) match {
-    case (a,"Int") => a.asInstanceOf[Int] != 0
-    case (a,"Boolean") => a.asInstanceOf[Boolean]
-    case (a,"String") => a.asInstanceOf[String].toLowerCase != "false"
-    case (a,"Array[Int]") => a.asInstanceOf[Array[Int]](0) != 0
-    case (a,"Array[Boolean]") => a.asInstanceOf[Array[Boolean]](0)
-    case (a,"Array[String]") => a.asInstanceOf[Array[String]](0).toLowerCase != "false"
-    case (_,tp) => throw new RException(s"Unable to cast ${tp} to Boolean")
-  }
-
-  def getS0(identifier: String): String = get(identifier) match {
-    case (a,"Int") => a.asInstanceOf[Int].toString
-    case (a,"Boolean") => a.asInstanceOf[Boolean].toString
-    case (a,"String") => a.asInstanceOf[String]
-    case (a,"Array[Int]") => a.asInstanceOf[Array[Int]](0).toString
-    case (a,"Array[Boolean]") => a.asInstanceOf[Array[Boolean]](0).toString
-    case (a,"Array[String]") => a.asInstanceOf[Array[String]](0)
-    case (_,tp) => throw new RException(s"Unable to cast ${tp} to String")
-  }
-
-  def getI1(identifier: String): Array[Int] = get(identifier) match {
-    case (a,"Int") => Array(a.asInstanceOf[Int])
-    case (a,"Boolean") => Array(if (a.asInstanceOf[Boolean]) 1 else 0)
-    case (a,"String") => Array(a.asInstanceOf[String].toInt)
-    case (a,"Array[Int]") => a.asInstanceOf[Array[Int]]
-    case (a,"Array[Boolean]") => a.asInstanceOf[Array[Boolean]].map(x => if (x) 1 else 0)
-    case (a,"Array[String]") => a.asInstanceOf[Array[String]].map(_.toInt)
-    case (_,tp) => throw new RException(s"Unable to cast ${tp} to Array[Int]")
-  }
-
-  def getB1(identifier: String): Array[Boolean] = get(identifier) match {
-    case (a,"Int") => Array(a.asInstanceOf[Int] != 0)
-    case (a,"Boolean") => Array(a.asInstanceOf[Boolean])
-    case (a,"String") => Array(a.asInstanceOf[String].toLowerCase != "false")
-    case (a,"Array[Int]") => a.asInstanceOf[Array[Int]].map(_ != 0)
-    case (a,"Array[Boolean]") => a.asInstanceOf[Array[Boolean]]
-    case (a,"Array[String]") => a.asInstanceOf[Array[String]].map(_.toLowerCase != "false")
-    case (_,tp) => throw new RException(s"Unable to cast ${tp} to Array[Boolean]")
-  }
-
-  def getS1(identifier: String): Array[String] = get(identifier) match {
-    case (a,"Int") => Array(a.asInstanceOf[Int].toString)
-    case (a,"Boolean") => Array(a.asInstanceOf[Boolean].toString)
-    case (a,"String") => Array(a.asInstanceOf[String])
-    case (a,"Array[Int]") => a.asInstanceOf[Array[Int]].map(_.toString)
-    case (a,"Array[Boolean]") => a.asInstanceOf[Array[Boolean]].map(_.toString)
-    case (a,"Array[String]") => a.asInstanceOf[Array[String]]
-    case (_,tp) => throw new RException(s"Unable to cast ${tp} to Array[String]")
-  }
-
-  def getR(identifier: String): RObjectRef = get(identifier,true) match {
-    case (a,"RObject") => a.asInstanceOf[RObjectRef]
-    case (_,tp) => throw new RException(s"Unable to cast ${tp} to RObject")
-  }
-
-  def gc(): Unit = {
-    logger.debug("Sending GC request.")
-    out.writeInt(RClient.Protocol.GC)
-    out.flush()
-  }
-
-
-
-}
-
-object RClient {
-
-  object Protocol {
-
-    // Data Types
-    val UNSUPPORTED_TYPE = 0
-    val INTEGER = 1
-    val DOUBLE =  2
-    val BOOLEAN = 3
-    val STRING =  4
-    val DATE = 5
-    val DATETIME = 6
-
-    // Data Structures
-    val UNSUPPORTED_STRUCTURE = 10
-    val NULLTYPE  = 11
-    val REFERENCE = 12
-    val ATOMIC    = 13
-    val VECTOR    = 14
-    val MATRIX    = 15
-    val LIST      = 16
-    val DATAFRAME = 17
-    val S3CLASS   = 18
-    val S4CLASS   = 19
-    val JOBJ      = 20
-
-    // Commands
-    val EXIT          = 100
-    val RESET         = 101
-    val GC            = 102
-    val DEBUG         = 103
-    val EVAL          = 104
-    val SET           = 105
-    val SET_SINGLE    = 106
-    val SET_DOUBLE    = 107
-    val GET           = 108
-    val GET_REFERENCE = 109
-    val DEF           = 110
-    val INVOKE        = 111
-    val SCALAP        = 112
-
-    // Result
-    val OK = 1000
-    val ERROR = 1001
-    val UNDEFINED_IDENTIFIER = 1002
-
-    // Misc.
-    val CURRENT_SUPPORTED_SCALA_VERSION = "2.10"
-
-  }
-
-  def writeString(out: DataOutputStream, string: String): Unit = {
-    val bytes = string.getBytes("UTF-8")
-    val length = bytes.length
-    out.writeInt(length)
-    out.write(bytes,0,length)
-  }
-
-  def readString(in: DataInputStream): String = {
-    val length = in.readInt()
-    val bytes = new Array[Byte](length)
-    in.readFully(bytes)
-    new String(bytes,"UTF-8")
-  }
-
-  def isMatrix[T](x: Array[Array[T]]): Boolean = {
-    if ( x.length != 0 ) {
-      val len = x(0).length
-      for ( i <- 1 until x.length ) {
-        if ( x(i).length != len ) return false
-      }
-    }
-    true
-  }
-
-  import scala.sys.process._
-  private val logger: Logger = LoggerFactory.getLogger(getClass)
-  val OS = sys.props("os.name").toLowerCase match {
-    case s if s.startsWith("""windows""") => "windows"
-    case s if s.startsWith("""linux""") => "linux"
-    case s if s.startsWith("""unix""") => "linux"
-    case s if s.startsWith("""mac""") => "macintosh"
-    case _ => throw new RException("Unrecognized OS")
-  }
-
-  val defaultArguments = OS match {
-    case "windows" =>    Array[String]("--vanilla","--silent","--slave","--ess")
-    case "linux" =>      Array[String]("--vanilla","--silent","--slave","--interactive")
-    case "unix" =>       Array[String]("--vanilla","--silent","--slave","--interactive")
-    case "macintosh" =>  Array[String]("--vanilla","--silent","--slave","--interactive")
-  }
-
-  lazy val defaultRCmd = OS match {
-    case "windows" =>   findROnWindows
-    case "linux" =>     """R"""
-    case "unix" =>      """R"""
-    case "macintosh" => """R"""
-  }
-
-  def findROnWindows: String = {
-    val NEWLINE = sys.props("line.separator")
-    var result : String = null
-    for ( root <- List("HKEY_LOCAL_MACHINE","HKEY_CURRENT_USER") ) {
-      val out = new StringBuilder()
-      val logger = ProcessLogger((o: String) => { out.append(o); out.append(NEWLINE) },(e: String) => {})
-      try {
-        ("reg query \"" + root + "\\Software\\R-core\\R\" /v \"InstallPath\"") ! logger
-        val a = out.toString.split(NEWLINE).filter(_.matches("""^\s*InstallPath\s*.*"""))(0)
-        result = a.split("REG_SZ")(1).trim() + """\bin\R.exe"""
-      } catch {
-        case _ : Throwable =>
-      }
-    }
-    if ( result == null ) throw new RException("Cannot locate R using Windows registry.")
-    else return result
-  }
-
-  def reader(label: String)(input: InputStream) = {
-    val in = new BufferedReader(new InputStreamReader(input))
-    var line = in.readLine()
-    while ( line != null ) {
-      logger.debug(label+line)
-      line = in.readLine()
-    }
-    in.close()
-  }
-
-  class ScalaSockets(portsFilename: String) {
-    private val logger: Logger = LoggerFactory.getLogger(getClass)
-
-    val serverIn  = new ServerSocket(0,0,InetAddress.getByName(null))
-    val serverOut = new ServerSocket(0,0,InetAddress.getByName(null))
-
-    locally {
-      logger.info("Trying to open ports filename: "+portsFilename)
-      val portNumberFile = new File(portsFilename)
-      val p = new PrintWriter(portNumberFile)
-      p.println(serverIn.getLocalPort+" "+serverOut.getLocalPort)
-      p.close()
-      logger.info("Servers are running on port "+serverIn.getLocalPort+" "+serverOut.getLocalPort)
-    }
-
-    val socketIn = serverIn.accept
-    logger.info("serverinaccept done")
-    val in = new DataInputStream(new BufferedInputStream(socketIn.getInputStream))
-    logger.info("in has been created")
-    val socketOut = serverOut.accept
-    logger.info("serverouacceptdone")
-    val out = new DataOutputStream(new BufferedOutputStream(socketOut.getOutputStream))
-    logger.info("out is done")
-  }
-
-  def makeSockets(portsFilename : String) = new ScalaSockets(portsFilename)
-
-  def apply(): RClient = apply(defaultRCmd)
-
-  def apply(rCmd: String, libdir : String = "",debug: Boolean = false, timeout: Int = 60): RClient = {
-    logger.debug("Creating processIO")
-    var cmd: PrintWriter = null
-    val command = rCmd +: defaultArguments
-    val processCmd = Process(command)
-
-    val processIO = new ProcessIO(
-      o => { cmd = new PrintWriter(o) },
-      reader("STDOUT DEBUG: "),
-      reader("STDERR DEBUG: "),
-      true
-    )
-    val portsFile = File.createTempFile("rscala-","")
-    val processInstance = processCmd.run(processIO)
-    val snippet = s"""
-rscala:::rServe(rscala:::newSockets('${portsFile.getAbsolutePath.replaceAll(File.separator,"/")}',debug=${if ( debug ) "TRUE" else "FALSE"},timeout=${timeout}))
-q(save='no')
-    """
-    while ( cmd == null ) Thread.sleep(100)
-    logger.info("sending snippet " + snippet)
-    cmd.println(snippet)
-    cmd.flush()
-    val sockets = makeSockets(portsFile.getAbsolutePath)
-    sockets.out.writeInt(Protocol.OK)
-    sockets.out.flush()
-    try {
-      assert( readString(sockets.in) == org.apache.zeppelin.rinterpreter.rscala.Version )
-    } catch {
-      case _: Throwable => throw new RException("The scala and R versions of the package don't match")
-    }
-    apply(sockets.in,sockets.out)
-  }
-
-  /** __For rscala developers only__: Returns an instance of the [[RClient]] class.  */
-  def apply(in: DataInputStream, out: DataOutputStream): RClient = new RClient(in,out)
-
-}
\ No newline at end of file
diff --git a/r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/RException.scala b/r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/RException.scala
deleted file mode 100644
index 43d129d..0000000
--- a/r/src/main/scala/org/apache/zeppelin/rinterpreter/rscala/RException.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.rinterpreter.rscala
-
-import org.apache.zeppelin.interpreter.InterpreterResult
-
-class RException(val snippet : String, val error : String, val message : String = "") extends Exception {
-
-  def this(snippet : String) = this(snippet, "")
-
-  def getInterpreterResult : InterpreterResult = new
-      InterpreterResult(InterpreterResult.Code.ERROR, message + "\n" + snippet + "\n" + error)
-
-  def getInterpreterResult(st : String) : InterpreterResult = new
-      InterpreterResult(InterpreterResult.Code.ERROR, message + "\n" + st + "\n" + error)
-}
diff --git a/r/src/main/scala/scala/Console.scala b/r/src/main/scala/scala/Console.scala
deleted file mode 100644
index 6b8f93c..0000000
--- a/r/src/main/scala/scala/Console.scala
+++ /dev/null
@@ -1,491 +0,0 @@
-/*                     __                                               *\
-Copyright (c) 2002-2016 EPFL
-Copyright (c) 2011-2016 Lightbend, Inc. (formerly Typesafe, Inc.)
-
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-Neither the name of the EPFL nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF M MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PRO [...]
-*/
-
-
-package scala
-
-import java.io.{BufferedReader, InputStream, InputStreamReader,
-                IOException, OutputStream, PrintStream, Reader}
-import java.text.MessageFormat
-import scala.util.DynamicVariable
-
-
-/** Implements functionality for
- *  printing Scala values on the terminal as well as reading specific values.
- *  Also defines constants for marking up text on ANSI terminals.
- *
- *  @author  Matthias Zenger
- *  @version 1.0, 03/09/2003
- */
-object Console {
-
-  /** Foreground color for ANSI black */
-  final val BLACK      = "\033[30m"
-  /** Foreground color for ANSI red */
-  final val RED        = "\033[31m"
-  /** Foreground color for ANSI green */
-  final val GREEN      = "\033[32m"
-  /** Foreground color for ANSI yellow */
-  final val YELLOW     = "\033[33m"
-  /** Foreground color for ANSI blue */
-  final val BLUE       = "\033[34m"
-  /** Foreground color for ANSI magenta */
-  final val MAGENTA    = "\033[35m"
-  /** Foreground color for ANSI cyan */
-  final val CYAN       = "\033[36m"
-  /** Foreground color for ANSI white */
-  final val WHITE      = "\033[37m"
-
-  /** Background color for ANSI black */
-  final val BLACK_B    = "\033[40m"
-  /** Background color for ANSI red */
-  final val RED_B      = "\033[41m"
-  /** Background color for ANSI green */
-  final val GREEN_B    = "\033[42m"
-  /** Background color for ANSI yellow */
-  final val YELLOW_B   = "\033[43m"
-  /** Background color for ANSI blue */
-  final val BLUE_B     = "\033[44m"
-  /** Background color for ANSI magenta */
-  final val MAGENTA_B  = "\033[45m"
-  /** Background color for ANSI cyan */
-  final val CYAN_B     = "\033[46m"
-  /** Background color for ANSI white */
-  final val WHITE_B    = "\033[47m"
-
-  /** Reset ANSI styles */
-  final val RESET      = "\033[0m"
-  /** ANSI bold */
-  final val BOLD       = "\033[1m"
-  /** ANSI underlines */
-  final val UNDERLINED = "\033[4m"
-  /** ANSI blink */
-  final val BLINK      = "\033[5m"
-  /** ANSI reversed */
-  final val REVERSED   = "\033[7m"
-  /** ANSI invisible */
-  final val INVISIBLE  = "\033[8m"
-
-  // From Scala 2.10.5
-  // Start of rscala patch which only takes effect if RSCALA_TUNNELING environment variable is TRUE.
-  val baosOut = new java.io.ByteArrayOutputStream()
-  val baosErr = new java.io.ByteArrayOutputStream()
-  val psOut = new java.io.PrintStream(baosOut,true)
-  val psErr = new java.io.PrintStream(baosErr,true)
-  val originalOut = java.lang.System.out
-  val originalErr = java.lang.System.err
-  try {
-    if ( sys.env("RSCALA_TUNNELING") == "TRUE" ) {
-      java.lang.System.setOut(psOut)
-      java.lang.System.setErr(psErr)
-    }
-  } catch {
-    case _: Throwable =>
-  }
-  // End of rscala patch.
-
-  private val outVar = new DynamicVariable[PrintStream](java.lang.System.out)
-  private val errVar = new DynamicVariable[PrintStream](java.lang.System.err)
-  private val inVar = new DynamicVariable[BufferedReader](
-    new BufferedReader(new InputStreamReader(java.lang.System.in)))
-
-  /** The default output, can be overridden by `setOut` */
-  def out = outVar.value
-  /** The default error, can be overridden by `setErr` */
-  def err = errVar.value
-  /** The default input, can be overridden by `setIn` */
-  def in = inVar.value
-
-  /** Sets the default output stream.
-   *
-   *  @param out the new output stream.
-   */
-  def setOut(out: PrintStream) { outVar.value = out }
-
-  /** Sets the default output stream for the duration
-   *  of execution of one thunk.
-   *
-   *  @example {{{
-   *  withOut(Console.err) { println("This goes to default _error_") }
-   *  }}}
-   *
-   *  @param out the new output stream.
-   *  @param thunk the code to execute with
-   *               the new output stream active
-   *  @return the results of `thunk`
-   *  @see `withOut[T](out:OutputStream)(thunk: => T)`
-   */
-  def withOut[T](out: PrintStream)(thunk: =>T): T =
-    outVar.withValue(out)(thunk)
-
-  /** Sets the default output stream.
-   *
-   *  @param out the new output stream.
-   */
-  def setOut(out: OutputStream): Unit =
-    setOut(new PrintStream(out))
-
-  /** Sets the default output stream for the duration
-   *  of execution of one thunk.
-   *
-   *  @param out the new output stream.
-   *  @param thunk the code to execute with
-   *               the new output stream active
-   *  @return the results of `thunk`
-   *  @see `withOut[T](out:PrintStream)(thunk: => T)`
-   */
-  def withOut[T](out: OutputStream)(thunk: =>T): T =
-    withOut(new PrintStream(out))(thunk)
-
-
-  /** Sets the default error stream.
-   *
-   *  @param err the new error stream.
-   */
-  def setErr(err: PrintStream) { errVar.value = err }
-
-  /** Set the default error stream for the duration
-   *  of execution of one thunk.
-   *  @example {{{
-   *  withErr(Console.out) { println("This goes to default _out_") }
-   *  }}}
-   *
-   *  @param err the new error stream.
-   *  @param thunk the code to execute with
-   *               the new error stream active
-   *  @return the results of `thunk`
-   *  @see `withErr[T](err:OutputStream)(thunk: =>T)`
-   */
-  def withErr[T](err: PrintStream)(thunk: =>T): T =
-    errVar.withValue(err)(thunk)
-
-  /** Sets the default error stream.
-   *
-   *  @param err the new error stream.
-   */
-  def setErr(err: OutputStream): Unit =
-    setErr(new PrintStream(err))
-
-  /** Sets the default error stream for the duration
-   *  of execution of one thunk.
-   *
-   *  @param err the new error stream.
-   *  @param thunk the code to execute with
-   *               the new error stream active
-   *  @return the results of `thunk`
-   *  @see `withErr[T](err:PrintStream)(thunk: =>T)`
-   */
-  def withErr[T](err: OutputStream)(thunk: =>T): T =
-    withErr(new PrintStream(err))(thunk)
-
-
-  /** Sets the default input stream.
-   *
-   *  @param reader specifies the new input stream.
-   */
-  def setIn(reader: Reader) {
-    inVar.value = new BufferedReader(reader)
-  }
-
-  /** Sets the default input stream for the duration
-   *  of execution of one thunk.
-   *
-   *  @example {{{
-   *  val someFile:Reader = openFile("file.txt")
-   *  withIn(someFile) {
-   *    // Reads a line from file.txt instead of default input
-   *    println(readLine)
-   *  }
-   *  }}}
-   *
-   *  @param thunk the code to execute with
-   *               the new input stream active
-   *
-   * @return the results of `thunk`
-   * @see `withIn[T](in:InputStream)(thunk: =>T)`
-   */
-  def withIn[T](reader: Reader)(thunk: =>T): T =
-    inVar.withValue(new BufferedReader(reader))(thunk)
-
-  /** Sets the default input stream.
-   *
-   *  @param in the new input stream.
-   */
-  def setIn(in: InputStream) {
-    setIn(new InputStreamReader(in))
-  }
-
-  /** Sets the default input stream for the duration
-   *  of execution of one thunk.
-   *
-   *  @param in the new input stream.
-   *  @param thunk the code to execute with
-   *               the new input stream active
-   * @return the results of `thunk`
-   * @see `withIn[T](reader:Reader)(thunk: =>T)`
-   */
-  def withIn[T](in: InputStream)(thunk: =>T): T =
-    withIn(new InputStreamReader(in))(thunk)
-
-  /** Prints an object to `out` using its `toString` method.
-   *
-   *  @param obj the object to print; may be null.
-   */
-  def print(obj: Any) {
-    out.print(if (null == obj) "null" else obj.toString())
-  }
-
-  /** Flushes the output stream. This function is required when partial
-   *  output (i.e. output not terminated by a newline character) has
-   *  to be made visible on the terminal.
-   */
-  def flush() { out.flush() }
-
-  /** Prints a newline character on the default output.
-   */
-  def println() { out.println() }
-
-  /** Prints out an object to the default output, followed by a newline character.
-   *
-   *  @param x the object to print.
-   */
-  def println(x: Any) { out.println(x) }
-
-  /** Prints its arguments as a formatted string to the default output,
-   *  based on a string pattern (in a fashion similar to printf in C).
-   *
-   *  The interpretation of the formatting patterns is described in
-   *  <a href="" target="contentFrame" class="java/util/Formatter">
-   *  `java.util.Formatter`</a>.
-   *
-   *  @param text the pattern for formatting the arguments.
-   *  @param args the arguments used to instantiating the pattern.
-   *  @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments
-   */
-  def printf(text: String, args: Any*) { out.print(text format (args : _*)) }
-
-  /** Read a full line from the default input.  Returns `null` if the end of the
-   * input stream has been reached.
-   *
-   * @return the string read from the terminal or null if the end of stream was reached.
-   */
-  def readLine(): String = in.readLine()
-
-  /** Print formatted text to the default output and read a full line from the default input.
-   *  Returns `null` if the end of the input stream has been reached.
-   *
-   *  @param text the format of the text to print out, as in `printf`.
-   *  @param args the parameters used to instantiate the format, as in `printf`.
-   *  @return the string read from the default input
-   */
-  def readLine(text: String, args: Any*): String = {
-    printf(text, args: _*)
-    readLine()
-  }
-
-  /** Reads a boolean value from an entire line of the default input.
-   *  Has a fairly liberal interpretation of the input.
-   *
-   *  @return the boolean value read, or false if it couldn't be converted to a boolean
-   *  @throws java.io.EOFException if the end of the input stream has been reached.
-   */
-  def readBoolean(): Boolean = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toLowerCase() match {
-        case "true" => true
-        case "t" => true
-        case "yes" => true
-        case "y" => true
-        case _ => false
-      }
-  }
-
-  /** Reads a byte value from an entire line of the default input.
-   *
-   *  @return the Byte that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte
-   */
-  def readByte(): Byte = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toByte
-  }
-
-  /** Reads a short value from an entire line of the default input.
-   *
-   *  @return the short that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Short
-   */
-  def readShort(): Short = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toShort
-  }
-
-  /** Reads a char value from an entire line of the default input.
-   *
-   *  @return the Char that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty
-   */
-  def readChar(): Char = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s charAt 0
-  }
-
-  /** Reads an int value from an entire line of the default input.
-   *
-   *  @return the Int that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to an Int
-   */
-  def readInt(): Int = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toInt
-  }
-
-  /** Reads an long value from an entire line of the default input.
-   *
-   *  @return the Long that was read
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Long
-   */
-  def readLong(): Long = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toLong
-  }
-
-  /** Reads a float value from an entire line of the default input.
-   *  @return the Float that was read.
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
-   *
-   */
-  def readFloat(): Float = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toFloat
-  }
-
-  /** Reads a double value from an entire line of the default input.
-   *
-   *  @return the Double that was read.
-   *  @throws java.io.EOFException if the end of the
-   *  input stream has been reached.
-   *  @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
-   */
-  def readDouble(): Double = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      s.toDouble
-  }
-
-  /** Reads in some structured input (from the default input), specified by
-   *  a format specifier. See class `java.text.MessageFormat` for details of
-   *  the format specification.
-   *
-   *  @param format the format of the input.
-   *  @return a list of all extracted values.
-   *  @throws java.io.EOFException if the end of the input stream has been
-   *          reached.
-   */
-  def readf(format: String): List[Any] = {
-    val s = readLine()
-    if (s == null)
-      throw new java.io.EOFException("Console has reached end of input")
-    else
-      textComponents(new MessageFormat(format).parse(s))
-  }
-
-  /** Reads in some structured input (from the default input), specified by
-   *  a format specifier, returning only the first value extracted, according
-   *  to the format specification.
-   *
-   *  @param format format string, as accepted by `readf`.
-   *  @return The first value that was extracted from the input
-   */
-  def readf1(format: String): Any = readf(format).head
-
-  /** Reads in some structured input (from the default input), specified
-   *  by a format specifier, returning only the first two values extracted,
-   *  according to the format specification.
-   *
-   *  @param format format string, as accepted by `readf`.
-   *  @return A [[scala.Tuple2]] containing the first two values extracted
-   */
-  def readf2(format: String): (Any, Any) = {
-    val res = readf(format)
-    (res.head, res.tail.head)
-  }
-
-  /** Reads in some structured input (from the default input), specified
-   *  by a format specifier, returning only the first three values extracted,
-   *  according to the format specification.
-   *
-   *  @param format format string, as accepted by `readf`.
-   *  @return A [[scala.Tuple3]] containing the first three values extracted
-   */
-  def readf3(format: String): (Any, Any, Any) = {
-    val res = readf(format)
-    (res.head, res.tail.head, res.tail.tail.head)
-  }
-
-  private def textComponents(a: Array[AnyRef]): List[Any] = {
-    var i: Int = a.length - 1
-    var res: List[Any] = Nil
-    while (i >= 0) {
-      res = (a(i) match {
-        case x: java.lang.Boolean   => x.booleanValue()
-        case x: java.lang.Byte      => x.byteValue()
-        case x: java.lang.Short     => x.shortValue()
-        case x: java.lang.Character => x.charValue()
-        case x: java.lang.Integer   => x.intValue()
-        case x: java.lang.Long      => x.longValue()
-        case x: java.lang.Float     => x.floatValue()
-        case x: java.lang.Double    => x.doubleValue()
-        case x => x
-      }) :: res;
-      i -= 1
-    }
-    res
-  }
-}
diff --git a/r/src/test/scala/org/apache/spark/api/r/RBackendHelperTest.scala b/r/src/test/scala/org/apache/spark/api/r/RBackendHelperTest.scala
deleted file mode 100644
index cdc314d..0000000
--- a/r/src/test/scala/org/apache/spark/api/r/RBackendHelperTest.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.api.r
-
-import org.scalatest.FlatSpec
-import org.scalatest.Matchers._
-
-class RBackendHelperTest extends FlatSpec {
-
-  val backend : RBackendHelper = RBackendHelper()
-  val backend2 : RBackendHelper = RBackendHelper()
-
-  "RBackendHelper" should "create a SparkR backend" in {
-    val rbackend = backend
-    assert(true) // only looking for exceptions here
-  }
-
-  it should "initialize properly, returning a port > 0" in {
-    val port = backend.init()
-    assert(port > 0)
-  }
-
-  it should "start a thread" in {
-    val backend = backend2
-    backend.init()
-    val thread = backend.start()
-    thread shouldBe a [Thread]
-  }
-  
-  it should "close without error" in {
-    backend2.close
-    assert(true) // only looking for exceptions
-  }
-}
diff --git a/r/src/test/scala/org/apache/zeppelin/rinterpreter/RContextInitTest.scala b/r/src/test/scala/org/apache/zeppelin/rinterpreter/RContextInitTest.scala
deleted file mode 100644
index 3d74e58..0000000
--- a/r/src/test/scala/org/apache/zeppelin/rinterpreter/RContextInitTest.scala
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.rinterpreter
-
-import java.io.{File, PrintWriter}
-import java.nio.file.{Files, Paths}
-
-import org.apache.zeppelin.rinterpreter.rscala.RClient
-import org.apache.zeppelin.rinterpreter.rscala.RClient._
-import org.scalatest.Matchers._
-import org.scalatest._
-
-class RContextInitTest extends FlatSpec {
-  import scala.sys.process._
-  var cmd: PrintWriter = null
-  val command = RClient.defaultRCmd +: RClient.defaultArguments
-  var processCmd : ProcessBuilder = null
-
-    "Process command" should "create a process builder" in {
-      processCmd = Process(command)
-      processCmd shouldBe a[ProcessBuilder]
-    }
-  it should "be persistent for testing purposes" in {
-    processCmd shouldBe a [ProcessBuilder]
-  }
-
-  var processIO : ProcessIO = null
-
-  "Creating Process IO" should "not throw an exception" in {
-    processIO = new ProcessIO(
-      o => {
-        cmd = new PrintWriter(o)
-      },
-      reader("STDOUT DEBUG: "),
-      reader("STDERR DEBUG: "),
-      true
-    )
-    processIO shouldBe a [ProcessIO]
-  }
-  var portsFile : File = null
-    "A temp file " should "be created" in {
-      portsFile = File.createTempFile("rscala-", "")
-      assertResult(true) {portsFile.exists()}
-    }
-  var processInstance : Process = null
-
-  "Process instance" should "launch" in {
-    processInstance = processCmd.run(processIO)
-    assert(true)
-  }
-  var libpath : String = null
-  "RZeppelin R Package" should "be found" in {
-    libpath  = if (Files.exists(Paths.get("R/lib"))) "R/lib"
-    else if (Files.exists(Paths.get("../R/lib"))) "../R/lib"
-    else throw new RuntimeException("Could not find rzeppelin - it must be in either R/lib or ../R/lib")
-    assert(Files.exists(Paths.get(libpath + "/rzeppelin")))
-  }
-  var snippet : String = null
-
-  "Creating the snippit" should "be impossible to fail" in {
-    snippet =     s"""
-library(lib.loc="$libpath", rzeppelin)
-rzeppelin:::rServe(rzeppelin:::newSockets('${portsFile.getAbsolutePath.replaceAll(File.separator, "/")}',debug=FALSE,timeout=60))
-q(save='no')"""
-    assert(true)
-  }
-  "Cmd" should "stop being null" in {
-    while (cmd == null) Thread.sleep(100)
-    assert(cmd != null)
-  }
-  it should "accept the snippet" in {
-    cmd.println(snippet)
-    cmd.flush()
-    assert(true)
-  }
-
-  var sockets : ScalaSockets = null
-
-  "Scala Sockets" should "be created and signal OK" in {
-    sockets = new ScalaSockets(portsFile.getAbsolutePath)
-    sockets.out.writeInt(RClient.Protocol.OK)
-    sockets.out.flush()
-    assert(true)
-  }
-  "The R and Scala versions" should "match" in {
-    assert(RClient.readString(sockets.in) == org.apache.zeppelin.rinterpreter.rscala.Version)
-  }
-  var rcon : RContext = null
-  "Creating an RContext" should "not fail" in {
-    rcon = new RContext(sockets, false)
-  }
-  "An open RContext" should "destroy safely" in {
-    rcon.close()
-    assertResult(false) {
-      rcon.isOpen
-    }
-  }
-}
\ No newline at end of file
diff --git a/r/src/test/scala/org/apache/zeppelin/rinterpreter/RContextTest.scala b/r/src/test/scala/org/apache/zeppelin/rinterpreter/RContextTest.scala
deleted file mode 100644
index 8b11156..0000000
--- a/r/src/test/scala/org/apache/zeppelin/rinterpreter/RContextTest.scala
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.zeppelin.rinterpreter
-
-import java.util.Properties
-
-import org.apache.zeppelin.RTest
-import org.apache.zeppelin.rinterpreter.rscala.RException
-import org.apache.zeppelin.spark.SparkInterpreter
-import org.scalatest.Matchers._
-import org.scalatest._
-
-class RContextTest extends FlatSpec {
-  RContext.resetRcon()
-  
-  val rcon = RContext(new Properties(), "test")
-  
-  "The RContext Singleton" should "create an RContext without Spark" in { () =>
-    rcon shouldBe a[RContext]
-  }
-  
-  "The RContext" should "be openable without spark" in { () =>
-    rcon.open(None)
-    assert(rcon.isOpen)
-  }
-
-  it should "be able to confirm that stats is available" taggedAs(RTest) in { () =>
-    assertResult(true) {
-      rcon.testRPackage("stats")
-    }
-  }
-
-  it should "be able to confirm that a bogus package is not available"  taggedAs(RTest) in { () =>
-    assertResult(false) {
-      rcon.testRPackage("thisisagarbagepackagename")
-    }
-  }
-
-  it should "be able to add 2 + 2"  taggedAs(RTest) in { () =>
-    assertResult(4) {
-      rcon.evalI0("2 + 2")
-    }
-  }
-  it should "be able to return a vector"  taggedAs(RTest) in { () =>
-    assertResult(10) {
-      rcon.evalI1("1:10").length
-    }
-  }
-  it should "be able to return a string"  taggedAs(RTest) in { () =>
-    
-    assertResult("hello world") {
-      rcon.evalS0("'hello world'")
-    }
-  }
-  it should "be able to return a vector of strings"  taggedAs(RTest)  in { () =>
-    
-    assertResult(26) {
-      rcon.evalS1("LETTERS").length
-    }
-  }
-
-  it should "throw an RException if told to evaluate garbage code"  taggedAs(RTest)  in { () =>
-    
-    intercept[RException] {
-      rcon.eval("funkyfunction()")
-    }
-  }
-
-//  it should "Throw an exception if we try to initialize SparkR without a SQLContext" in {() =>
-//
-//    intercept[RuntimeException] {
-//      rcon.initializeSparkRTest()
-//    }
-//  }
-
-  it should "have rzeppelin available"  taggedAs(RTest) in { () =>
-    
-    assertResult(true) {
-      rcon.testRPackage("rzeppelin")
-    }
-  }
-  it should "have evaluate available"  taggedAs(RTest) in { () =>
-    
-    assertResult(true) {
-      rcon.testRPackage("evaluate")
-    }
-  }
-  it should "have repr available"  taggedAs(RTest) in { () =>
-    
-    assertResult(true) {
-      rcon.testRPackage("repr")
-    }
-  }
-  it should "also close politely"  taggedAs(RTest) in { () =>
-    
-    rcon.close()
-    assertResult(2) {rcon.isOpen}
-  }
-}
diff --git a/r/src/test/scala/org/apache/zeppelin/rinterpreter/RInterpreterTest.scala b/r/src/test/scala/org/apache/zeppelin/rinterpreter/RInterpreterTest.scala
deleted file mode 100644
index 443394c..0000000
--- a/r/src/test/scala/org/apache/zeppelin/rinterpreter/RInterpreterTest.scala
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin.rinterpreter
-
-import java.util.Properties
-
-import org.apache.zeppelin.RTest
-import org.apache.zeppelin.interpreter.{Interpreter, InterpreterContext, InterpreterResult, InterpreterGroup}
-import org.scalatest.Matchers._
-import org.scalatest._
-import java.util.ArrayList
-
-class RInterpreterTest extends FlatSpec {
-
-  RContext.resetRcon()
-
-  class RIntTester extends RInterpreter(new Properties(), startSpark = false) {
-
-    def interpret(s: String, interpreterContext: InterpreterContext): InterpreterResult = {
-      val result : Array[String] = rContext.evalS1(s)
-      new InterpreterResult(InterpreterResult.Code.SUCCESS, result.mkString("\n"))
-    }
-  }
-  val rint = new RIntTester()
-
-  "An RInterpreter" should "exist" in {
-    assert(rint != null)
-  }
-
-  it should "not complain when we assign it a group" in {
-    val grp : InterpreterGroup = new InterpreterGroup("test")
-    val lst : ArrayList[Interpreter] = new ArrayList[Interpreter]()
-    lst.add(rint)
-    grp.put(rint.getClassName(), lst)
-    rint.setInterpreterGroup(grp)
-  }
-
-  it should "create a fresh rContext when we ask for one" in {
-    assert(! rint.getrContext.isOpen)
-  }
-
-  it should "open"  taggedAs(RTest) in {
-    rint.open()
-    assert(rint.getrContext.isOpen)
-  }
-
-  it should "have rzeppelin available"  taggedAs(RTest) in {
-    assume(rint.getrContext.isOpen)
-    assert(rint.getrContext.testRPackage("rzeppelin"))
-  }
-  it should "have an rContext able to do simple addition" taggedAs(RTest)  in {
-    assume(rint.getrContext.isOpen)
-    assert(rint.getrContext.evalI0("2 + 2") == 4)
-  }
-
-
-
-/*  it should "have a functional completion function" taggedAs(RTest) in {
-    val result = rint.hiddenCompletion("hi", 3)
-    result should (contain ("hist"))
-  }*/
-
-  it should "have a working progress meter" in {
-    rint.getrContext.setProgress(50)
-    assertResult(50) {
-      rint.getrContext.getProgress
-    }
-  }
-
-  it should "have persistent properties" in {
-    val props = new Properties()
-    props.setProperty("hello", "world")
-    rint.setProperties(props)
-    assertResult("world") {
-      rint.getProperty("hello")
-    }
-  }
-
-  var rint2 : RIntTester = null
-
-  it should "Share RContexts if they share the same InterpreterGroup" in {
-    rint2 = new RIntTester()
-    val lst : ArrayList[Interpreter] = new ArrayList[Interpreter]()
-    lst.add(rint2)
-    val grp = rint.getInterpreterGroup()
-    grp.put(rint2.getClassName(), lst)
-    rint2.setInterpreterGroup(grp)
-    rint2.open()
-    rint.getrContext should be theSameInstanceAs rint2.getrContext
-  }
-
-  "Opening the second RInterpreter" should "not have closed the first RContext" in {
-    assert(rint.getrContext.isOpen)
-  }
-
-  var rint3 : RIntTester = null
-
-  "An RInterpreter in a different InterpreterGroup" should "have a different R Context" in {
-    rint3 = new RIntTester()
-    val grp : InterpreterGroup = new InterpreterGroup("othertest")
-    val lst : ArrayList[Interpreter] = new ArrayList[Interpreter]()
-    lst.add(rint3)
-    grp.put(rint3.getClassName(), lst)
-    rint3.setInterpreterGroup(grp)
-    rint3.open()
-    rint3.getrContext shouldNot be theSameInstanceAs rint2.getrContext
-  }
-
-  "The first RInterpreter" should "close politely" in {
-    rint.close()
-    assert(!rint.getrContext.isOpen)
-  }
-
-  "and so" should "the other one" in {
-    rint2.close()
-    assert(!rint2.getrContext.isOpen)
-  }
-
-  "and " should "the third one" in {
-    rint3.close()
-    assert(!rint2.getrContext.isOpen)
-  }
-
-//  fixture.sparky.close()
-
-}
diff --git a/r/src/test/scala/org/apache/zeppelin/rinterpreter/WrapperTest.scala b/r/src/test/scala/org/apache/zeppelin/rinterpreter/WrapperTest.scala
deleted file mode 100644
index a85cfe6..0000000
--- a/r/src/test/scala/org/apache/zeppelin/rinterpreter/WrapperTest.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.zeppelin.rinterpreter
-
-import java.util
-import java.util.Properties
-
-import org.apache.zeppelin.interpreter.{Interpreter, InterpreterGroup, InterpreterResult}
-import org.scalatest.FlatSpec
-import java.util.List
-import org.scalatest.Matchers._
-
-class WrapperTest extends FlatSpec {
-  RContext.resetRcon()
-
-  val repl: RRepl = new RRepl(new Properties(), false)
-  val group : InterpreterGroup = new InterpreterGroup()
-  var lst = new util.LinkedList[Interpreter]()
-  lst.add(repl)
-  group.put(repl.getClassName(), lst)
-  repl.setInterpreterGroup(group)
-
-  "The R REPL" should "exist and be of the right class" in {
-
-    repl shouldBe a[RRepl]
-  }
-
-  it should "Have a RRepl Interpreter inside" in {
-    repl.getInnerInterpreter shouldBe a[RReplInterpreter]
-  }
-  val repi = repl.getInnerInterpreter.asInstanceOf[RReplInterpreter]
-
-  it should "have a fresh rContext" in {
-    assert(!repi.getrContext.isOpen)
-  }
-
-  val knitr: KnitR = new KnitR(new Properties(), false)
-  lst = new util.LinkedList[Interpreter]()
-  lst.add(knitr)
-  group.put(knitr.getClassName(), lst)
-  knitr.setInterpreterGroup(group)
-
-  "The KnitR wrapper" should "exist and be of the right class" in {
-    knitr shouldBe a[KnitR]
-    }
-    it should "have a KnitRInterpreter inside" in {
-      knitr.getInnerInterpreter shouldBe a [KnitRInterpreter]
-    }
-
-  it should "share the RContext" in {
-    knitr.getInnerInterpreter.asInstanceOf[KnitRInterpreter].getrContext should be theSameInstanceAs repi.getrContext
-  }
-
-  it should "open without error" in {
-    knitr.open()
-    assert(knitr.getInnerInterpreter.asInstanceOf[KnitRInterpreter].getrContext.isOpen)
-  }
-
-  it should "produce HTML in response to a simple query" in {
-    val result = knitr.interpret(
-      """
-        |```{r}
-        |2 + 2
-        |```
-      """.stripMargin, null)
-    withClue(result.message().get(0).getData()) {
-      result should have (
-      'code (InterpreterResult.Code.SUCCESS)
-      )
-    }
-  }
-
-  it should "close properly" in {
-    repi.getrContext.close()
-    assertResult(false) {
-      repi.getrContext.isOpen
-    }
-  }
-
-  "Just in case there are two rContexts, the other one" should "close properly also" in {
-    val rcon = knitr.getInnerInterpreter.asInstanceOf[KnitRInterpreter].getrContext
-    rcon.close()
-    assertResult(false) {
-      rcon.isOpen
-    }
-  }
-
-}
diff --git a/r/src/test/scala/org/apache/zeppelin/rinterpreter/package.scala b/r/src/test/scala/org/apache/zeppelin/rinterpreter/package.scala
deleted file mode 100644
index eceeec5..0000000
--- a/r/src/test/scala/org/apache/zeppelin/rinterpreter/package.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.zeppelin
-
-import org.scalatest.Tag
-
-object RTest extends Tag("RTest")
-object SparkTest extends Tag("SparkTest")