Posted to commits@zeppelin.apache.org by mo...@apache.org on 2015/04/06 06:05:52 UTC

[01/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Repository: incubator-zeppelin
Updated Branches:
  refs/heads/master 7a60b3355 -> 669d408dc


http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/utility/IdHashes.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/utility/IdHashes.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/utility/IdHashes.java
new file mode 100644
index 0000000..812dd76
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/utility/IdHashes.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.notebook.utility;
+
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Generates tiny (short) IDs by encoding a number with a custom character dictionary.
+ *
+ * @author anthonycorbacho
+ *
+ */
+public class IdHashes {
+  public static final char[] DICTIONARY = new char[] {'1', '2', '3', '4', '5', '6', '7', '8', '9',
+    'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U',
+    'V', 'W', 'X', 'Y', 'Z'};
+
+  /**
+   * Encodes the given number into a string, using the characters of DICTIONARY as base-32 digits.
+   *
+   * @param value the number to encode.
+   * @return the encoded string.
+   */
+  public static String encode(Long value) {
+
+    List<Character> result = new ArrayList<Character>();
+    BigInteger base = new BigInteger("" + DICTIONARY.length);
+    int exponent = 1;
+    BigInteger remaining = new BigInteger(value.toString());
+    while (true) {
+      BigInteger a = base.pow(exponent); // 16^1 = 16
+      BigInteger b = remaining.mod(a); // 119 % 16 = 7 | 112 % 256 = 112
+      BigInteger c = base.pow(exponent - 1);
+      BigInteger d = b.divide(c);
+
+      // d is always a valid index into DICTIONARY: b < base^exponent and
+      // c = base^(exponent - 1), so d = b / c < base = DICTIONARY.length.
+      // A defensive bound check could compare d against BigInteger.valueOf(DICTIONARY.length).
+      result.add(DICTIONARY[d.intValue()]);
+      remaining = remaining.subtract(b); // 119 - 7 = 112 | 112 - 112 = 0
+
+      // finished?
+      if (remaining.equals(BigInteger.ZERO)) {
+        break;
+      }
+
+      exponent++;
+    }
+
+    // need to reverse it, since the start of the list contains the least significant values
+    StringBuffer sb = new StringBuffer();
+    for (int i = result.size() - 1; i >= 0; i--) {
+      sb.append(result.get(i));
+    }
+    return sb.toString();
+  }
+}

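For readers skimming the diff: a minimal, hypothetical usage sketch of the IdHashes utility added above. It is not part of the commit; only the static IdHashes.encode method shown above is assumed, and the demo class name is illustrative.

import org.apache.zeppelin.notebook.utility.IdHashes;

public class IdHashesDemo {
  public static void main(String[] args) {
    // Encode a numeric value into a short ID built from the 32-character
    // dictionary above; each output character carries one base-32 digit.
    long value = System.currentTimeMillis();
    String tinyId = IdHashes.encode(value);           // long autoboxes to Long
    System.out.println(value + " -> " + tinyId);

    // Worked example derived from the code above:
    // 119 = 3 * 32 + 23, so the result is DICTIONARY[3], DICTIONARY[23] = "4R".
    System.out.println(IdHashes.encode(119L));
  }
}
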
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/util/Util.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/util/Util.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/util/Util.java
new file mode 100644
index 0000000..135ffda
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/util/Util.java
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.util;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * TODO(moon) : add description.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class Util {
+
+  public static String[] split(String str, char split) {
+    return split(str, new String[] {String.valueOf(split)}, false);
+  }
+
+  public static String[] split(String str, String[] splitters, boolean includeSplitter) {
+    String escapeSeq = "\"',;<%>";
+    char escapeChar = '\\';
+    String[] blockStart = new String[] {"\"", "'", "<%", "N_<"};
+    String[] blockEnd = new String[] {"\"", "'", "%>", "N_>"};
+
+    return split(str, escapeSeq, escapeChar, blockStart, blockEnd, splitters, includeSplitter);
+
+  }
+
+  public static String[] split(String str, String escapeSeq, char escapeChar, String[] blockStart,
+      String[] blockEnd, String[] splitters, boolean includeSplitter) {
+
+    List<String> splits = new ArrayList<String>();
+
+    String curString = "";
+
+    boolean escape = false; // true when escape char is found
+    int lastEscapeOffset = -1;
+    int blockStartPos = -1;
+    List<Integer> blockStack = new LinkedList<Integer>();
+
+    for (int i = 0; i < str.length(); i++) {
+      char c = str.charAt(i);
+
+      // escape char detected
+      if (c == escapeChar && escape == false) {
+        escape = true;
+        continue;
+      }
+
+      // escaped char comes
+      if (escape == true) {
+        if (escapeSeq.indexOf(c) < 0) {
+          curString += escapeChar;
+        }
+        curString += c;
+        escape = false;
+        lastEscapeOffset = curString.length();
+        continue;
+      }
+
+      if (blockStack.size() > 0) { // inside of block
+        curString += c;
+        // check multichar block
+        boolean multicharBlockDetected = false;
+        for (int b = 0; b < blockStart.length; b++) {
+          if (blockStartPos >= 0
+              && getBlockStr(blockStart[b]).compareTo(str.substring(blockStartPos, i)) == 0) {
+            blockStack.remove(0);
+            blockStack.add(0, b);
+            multicharBlockDetected = true;
+            break;
+          }
+        }
+        if (multicharBlockDetected == true) {
+          continue;
+        }
+
+        // check if current block is nestable
+        if (isNestedBlock(blockStart[blockStack.get(0)]) == true) {
+          // try to find nested block start
+
+          if (curString.substring(lastEscapeOffset + 1).endsWith(
+              getBlockStr(blockStart[blockStack.get(0)])) == true) {
+            blockStack.add(0, blockStack.get(0)); // block is started
+            blockStartPos = i;
+            continue;
+          }
+        }
+
+        // check if block is finishing
+        if (curString.substring(lastEscapeOffset + 1).endsWith(
+            getBlockStr(blockEnd[blockStack.get(0)]))) {
+          // the block closer is one of the splitters (and not nested block)
+          if (isNestedBlock(blockEnd[blockStack.get(0)]) == false) {
+            for (String splitter : splitters) {
+              if (splitter.compareTo(getBlockStr(blockEnd[blockStack.get(0)])) == 0) {
+                splits.add(curString);
+                if (includeSplitter == true) {
+                  splits.add(splitter);
+                }
+                curString = "";
+                lastEscapeOffset = -1;
+
+                break;
+              }
+            }
+          }
+          blockStartPos = -1;
+          blockStack.remove(0);
+          continue;
+        }
+
+      } else { // not in the block
+        boolean splitted = false;
+        for (String splitter : splitters) {
+          // forward check for splitter
+          if (splitter.compareTo(
+              str.substring(i, Math.min(i + splitter.length(), str.length()))) == 0) {
+            splits.add(curString);
+            if (includeSplitter == true) {
+              splits.add(splitter);
+            }
+            curString = "";
+            lastEscapeOffset = -1;
+            i += splitter.length() - 1;
+            splitted = true;
+            break;
+          }
+        }
+        if (splitted == true) {
+          continue;
+        }
+
+        // add char to current string
+        curString += c;
+
+        // check if block is started
+        for (int b = 0; b < blockStart.length; b++) {
+          if (curString.substring(lastEscapeOffset + 1)
+                       .endsWith(getBlockStr(blockStart[b])) == true) {
+            blockStack.add(0, b); // block is started
+            blockStartPos = i;
+            break;
+          }
+        }
+      }
+    }
+    if (curString.length() > 0) {
+      splits.add(curString.trim());
+    }
+    return splits.toArray(new String[] {});
+
+  }
+
+  private static String getBlockStr(String blockDef) {
+    if (blockDef.startsWith("N_")) {
+      return blockDef.substring("N_".length());
+    } else {
+      return blockDef;
+    }
+  }
+
+  private static boolean isNestedBlock(String blockDef) {
+    if (blockDef.startsWith("N_")) {
+      return true;
+    } else {
+      return false;
+    }
+  }
+}

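A brief illustrative sketch (not part of the commit) of how the relocated Util.split helpers behave. The expected results simply restate assertions from UtilTest further down in this patch; the demo class name is hypothetical.

import org.apache.zeppelin.util.Util;

public class UtilSplitDemo {
  public static void main(String[] args) {
    // Splitters outside of quoted blocks break the string; the quoted block
    // "world '>|hehe" is kept intact even though it contains '>' and '|'.
    String[] tokens = Util.split(
        "hello | \"world '>|hehe\" > next >> sink",
        new String[]{"|", ">>", ">"},
        true);                                  // true: keep the splitters too
    for (String t : tokens) {
      System.out.println("[" + t + "]");
    }
    // Per UtilTest below: 7 tokens, with tokens[2] being the quoted segment.

    // The char overload splits on a single character; a trailing splitter
    // yields a single trimmed token ("show tables").
    String[] stmts = Util.split("show tables;", ';');
    System.out.println(stmts.length + ": " + stmts[0]);
  }
}
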
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/resources/exec.erb
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/resources/exec.erb b/zeppelin-zengine/src/main/resources/exec.erb
deleted file mode 100644
index dc3877e..0000000
--- a/zeppelin-zengine/src/main/resources/exec.erb
+++ /dev/null
@@ -1,15 +0,0 @@
-<html>
-  <head>
-  </head>
-  <body>
-
-<% if z and z.result != nil %>
-	<% if z.result.rows != nil %>
-<pre><% z.result.rows.each do |row| %>
-<% row.each do |cell| %><%= cell %><% end %><% end %>
-<% end %></pre>
-	
-<% end %>
-
-  </body>
-</html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/resources/table.erb
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/resources/table.erb b/zeppelin-zengine/src/main/resources/table.erb
deleted file mode 100644
index 188902e..0000000
--- a/zeppelin-zengine/src/main/resources/table.erb
+++ /dev/null
@@ -1,36 +0,0 @@
-<html>
-  <head>
-    <script type='text/javascript' src='https://www.google.com/jsapi'></script>
-    <script type='text/javascript'>
-      google.load('visualization', '1', {packages:['table']});
-      google.setOnLoadCallback(drawTable);
-      function drawTable() {
-        var data = new google.visualization.DataTable();
-<% if z and z.result != nil %>
-	<% z.result.getColumnDef.each do |col| %>
-	        data.addColumn('string', '<%=col.name%>');
-	<% end %>
-	<% if z.result.rows != nil %>
-	        data.addRows([
-	            <% z.result.rows.each do |row| %>
-		      [
-			  <% row.each do |cell| %>
-			    '<%= cell.to_s().gsub("'","\\\\'") %>',
-			  <% end %>
-		      ],
-		    <% end %>
-		]);
-	
-	<% end %>
-<% end %>
-        var table = new google.visualization.Table(document.getElementById('table_div'));
-        table.draw(data, {showRowNumber: true});
-      }
-    </script>
-  </head>
-  <body>
-    <div id="table_div"></div>
-  </body>
-</html>
-
-

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/InterpreterFactoryTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/InterpreterFactoryTest.java b/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/InterpreterFactoryTest.java
deleted file mode 100644
index dbb65ea..0000000
--- a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/InterpreterFactoryTest.java
+++ /dev/null
@@ -1,112 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-import java.util.Properties;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration;
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration.ConfVars;
-import com.nflabs.zeppelin.interpreter.mock.MockInterpreter1;
-import com.nflabs.zeppelin.interpreter.mock.MockInterpreter2;
-
-public class InterpreterFactoryTest {
-
-	private InterpreterFactory factory;
-  private File tmpDir;
-  private ZeppelinConfiguration conf;
-  private InterpreterContext context;
-
-  @Before
-	public void setUp() throws Exception {
-    tmpDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis());
-    tmpDir.mkdirs();
-    new File(tmpDir, "conf").mkdirs();
-
-	  MockInterpreter1.register("mock1", "com.nflabs.zeppelin.interpreter.mock.MockInterpreter1");
-	  MockInterpreter2.register("mock2", "com.nflabs.zeppelin.interpreter.mock.MockInterpreter2");
-
-	  System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), tmpDir.getAbsolutePath());
-	  System.setProperty(ConfVars.ZEPPELIN_INTERPRETERS.getVarName(), "com.nflabs.zeppelin.interpreter.mock.MockInterpreter1,com.nflabs.zeppelin.interpreter.mock.MockInterpreter2");
-	  conf = new ZeppelinConfiguration();
-	  factory = new InterpreterFactory(conf, new InterpreterOption(false));
-	  context = new InterpreterContext("id", "title", "text", null, null);
-
-	}
-
-	@After
-	public void tearDown() throws Exception {
-	  delete(tmpDir);
-	}
-
-  private void delete(File file){
-    if(file.isFile()) file.delete();
-    else if(file.isDirectory()){
-      File [] files = file.listFiles();
-      if(files!=null && files.length>0){
-        for(File f : files){
-          delete(f);
-        }
-      }
-      file.delete();
-    }
-  }
-
-	@Test
-	public void testBasic() {
-	  List<String> all = factory.getDefaultInterpreterSettingList();
-
-		// get interpreter
-		Interpreter repl1 = factory.get(all.get(0)).getInterpreterGroup().getFirst();
-		assertFalse(((LazyOpenInterpreter) repl1).isOpen());
-		repl1.interpret("repl1", context);
-		assertTrue(((LazyOpenInterpreter) repl1).isOpen());
-
-		// try to get unavailable interpreter
-		assertNull(factory.get("unknown"));
-
-		// restart interpreter
-		factory.restart(all.get(0));
-		repl1 = factory.get(all.get(0)).getInterpreterGroup().getFirst();
-		assertFalse(((LazyOpenInterpreter) repl1).isOpen());
-	}
-
-  @Test
-  public void testFactoryDefaultList() throws InterpreterException, IOException {
-    // get default list from default setting
-    List<String> all = factory.getDefaultInterpreterSettingList();
-    assertEquals(2, all.size());
-    assertEquals(factory.get(all.get(0)).getInterpreterGroup().getFirst().getClassName(), "com.nflabs.zeppelin.interpreter.mock.MockInterpreter1");
-
-    // add setting
-    factory.add("a mock", "mock2", new InterpreterOption(false), new Properties());
-    all = factory.getDefaultInterpreterSettingList();
-    assertEquals(2, all.size());
-    assertEquals("mock1", factory.get(all.get(0)).getName());
-    assertEquals("a mock", factory.get(all.get(1)).getName());
-  }
-
-  @Test
-  public void testSaveLoad() throws InterpreterException, IOException {
-    // interpreter settings
-    assertEquals(2, factory.get().size());
-
-    // check if file saved
-    assertTrue(new File(conf.getInterpreterSettingPath()).exists());
-
-    factory.add("newsetting", "mock1", new InterpreterOption(false), new Properties());
-    assertEquals(3, factory.get().size());
-
-    InterpreterFactory factory2 = new InterpreterFactory(conf);
-    assertEquals(3, factory2.get().size());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/mock/MockInterpreter1.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/mock/MockInterpreter1.java b/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/mock/MockInterpreter1.java
deleted file mode 100644
index dfdfea7..0000000
--- a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/mock/MockInterpreter1.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.nflabs.zeppelin.interpreter.mock;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-public class MockInterpreter1 extends Interpreter{
-  Map<String, Object> vars = new HashMap<String, Object>();
-
-	public MockInterpreter1(Properties property) {
-		super(property);
-	}
-
-	@Override
-	public void open() {
-	}
-
-	@Override
-	public void close() {
-	}
-
-	@Override
-	public InterpreterResult interpret(String st, InterpreterContext context) {
-		return new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl1: "+st);
-	}
-
-	@Override
-	public void cancel(InterpreterContext context) {
-	}
-
-	@Override
-	public FormType getFormType() {
-		return FormType.SIMPLE;
-	}
-
-	@Override
-	public int getProgress(InterpreterContext context) {
-		return 0;
-	}
-
-	@Override
-	public Scheduler getScheduler() {
-		return SchedulerFactory.singleton().createOrGetFIFOScheduler("test_"+this.hashCode());
-	}
-
-	@Override
-	public List<String> completion(String buf, int cursor) {
-		return null;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/mock/MockInterpreter2.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/mock/MockInterpreter2.java b/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/mock/MockInterpreter2.java
deleted file mode 100644
index c5db654..0000000
--- a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/interpreter/mock/MockInterpreter2.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.nflabs.zeppelin.interpreter.mock;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-public class MockInterpreter2 extends Interpreter{
-  Map<String, Object> vars = new HashMap<String, Object>();
-
-	public MockInterpreter2(Properties property) {
-		super(property);
-	}
-
-	@Override
-	public void open() {
-	}
-
-	@Override
-	public void close() {
-	}
-
-	@Override
-	public InterpreterResult interpret(String st, InterpreterContext context) {
-		return new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl2: "+st);
-	}
-
-	@Override
-	public void cancel(InterpreterContext context) {
-	}
-
-	@Override
-	public FormType getFormType() {
-		return FormType.SIMPLE;
-	}
-
-	@Override
-	public int getProgress(InterpreterContext context) {
-		return 0;
-	}
-
-	@Override
-	public Scheduler getScheduler() {
-		return SchedulerFactory.singleton().createOrGetFIFOScheduler("test_"+this.hashCode());
-	}
-
-	@Override
-	public List<String> completion(String buf, int cursor) {
-		return null;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/notebook/NotebookTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/notebook/NotebookTest.java b/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/notebook/NotebookTest.java
deleted file mode 100644
index 0539b08..0000000
--- a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/notebook/NotebookTest.java
+++ /dev/null
@@ -1,173 +0,0 @@
-package com.nflabs.zeppelin.notebook;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Date;
-import java.util.Map;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.quartz.SchedulerException;
-
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration;
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration.ConfVars;
-import com.nflabs.zeppelin.interpreter.InterpreterFactory;
-import com.nflabs.zeppelin.interpreter.InterpreterOption;
-import com.nflabs.zeppelin.interpreter.mock.MockInterpreter1;
-import com.nflabs.zeppelin.interpreter.mock.MockInterpreter2;
-import com.nflabs.zeppelin.scheduler.Job;
-import com.nflabs.zeppelin.scheduler.Job.Status;
-import com.nflabs.zeppelin.scheduler.JobListener;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-public class NotebookTest implements JobListenerFactory{
-
-	private File tmpDir;
-	private ZeppelinConfiguration conf;
-	private SchedulerFactory schedulerFactory;
-	private File notebookDir;
-	private Notebook notebook;
-  private InterpreterFactory factory;
-
-	@Before
-	public void setUp() throws Exception {
-		tmpDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis());
-		tmpDir.mkdirs();
-		new File(tmpDir, "conf").mkdirs();
-		notebookDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis()+"/notebook");
-		notebookDir.mkdirs();
-
-    System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), tmpDir.getAbsolutePath());
-		System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath());
-		System.setProperty(ConfVars.ZEPPELIN_INTERPRETERS.getVarName(), "com.nflabs.zeppelin.interpreter.mock.MockInterpreter1,com.nflabs.zeppelin.interpreter.mock.MockInterpreter2");
-
-		conf = ZeppelinConfiguration.create();
-
-		this.schedulerFactory = new SchedulerFactory();
-
-    MockInterpreter1.register("mock1", "com.nflabs.zeppelin.interpreter.mock.MockInterpreter1");
-    MockInterpreter2.register("mock2", "com.nflabs.zeppelin.interpreter.mock.MockInterpreter2");
-
-    factory = new InterpreterFactory(conf, new InterpreterOption(false));
-
-		notebook = new Notebook(conf, schedulerFactory, factory, this);
-	}
-
-	@After
-	public void tearDown() throws Exception {
-		delete(tmpDir);
-	}
-
-	@Test
-	public void testSelectingReplImplementation() throws IOException {
-		Note note = notebook.createNote();
-		note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
-
-		// run with defatul repl
-		Paragraph p1 = note.addParagraph();
-		p1.setText("hello world");
-		note.run(p1.getId());
-		while(p1.isTerminated()==false || p1.getResult()==null) Thread.yield();
-		assertEquals("repl1: hello world", p1.getResult().message());
-
-		// run with specific repl
-		Paragraph p2 = note.addParagraph();
-		p2.setText("%mock2 hello world");
-		note.run(p2.getId());
-		while(p2.isTerminated()==false || p2.getResult()==null) Thread.yield();
-		assertEquals("repl2: hello world", p2.getResult().message());
-	}
-
-	@Test
-	public void testPersist() throws IOException, SchedulerException{
-		Note note = notebook.createNote();
-
-		// run with default repl
-		Paragraph p1 = note.addParagraph();
-		p1.setText("hello world");
-		note.persist();
-
-		Notebook notebook2 = new Notebook(conf, schedulerFactory, new InterpreterFactory(conf), this);
-		assertEquals(1, notebook2.getAllNotes().size());
-	}
-
-	@Test
-	public void testRunAll() throws IOException {
-		Note note = notebook.createNote();
-    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
-
-		Paragraph p1 = note.addParagraph();
-		p1.setText("p1");
-		Paragraph p2 = note.addParagraph();
-		p2.setText("p2");
-		assertEquals(null, p2.getResult());
-		note.runAll();
-
-		while(p2.isTerminated()==false || p2.getResult()==null) Thread.yield();
-		assertEquals("repl1: p2", p2.getResult().message());
-	}
-
-	@Test
-	public void testSchedule() throws InterruptedException, IOException{
-		// create a note and a paragraph
-		Note note = notebook.createNote();
-    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
-
-		Paragraph p = note.addParagraph();
-		p.setText("p1");
-		Date dateFinished = p.getDateFinished();
-		assertNull(dateFinished);
-
-		// set cron scheduler, once a second
-		Map<String, Object> config = note.getConfig();
-		config.put("cron", "* * * * * ?");
-		note.setConfig(config);
-		notebook.refreshCron(note.id());
-		Thread.sleep(1*1000);
-		dateFinished = p.getDateFinished();
-		assertNotNull(dateFinished);
-
-		// remove cron scheduler.
-		config.put("cron", null);
-		note.setConfig(config);
-		notebook.refreshCron(note.id());
-		Thread.sleep(1*1000);
-		assertEquals(dateFinished, p.getDateFinished());
-	}
-
-	private void delete(File file){
-		if(file.isFile()) file.delete();
-		else if(file.isDirectory()){
-			File [] files = file.listFiles();
-			if(files!=null && files.length>0){
-				for(File f : files){
-					delete(f);
-				}
-			}
-			file.delete();
-		}
-	}
-
-	@Override
-	public JobListener getParagraphJobListener(Note note) {
-		return new JobListener(){
-
-			@Override
-			public void onProgressUpdate(Job job, int progress) {
-			}
-
-			@Override
-			public void beforeStatusChange(Job job, Status before, Status after) {
-			}
-
-			@Override
-			public void afterStatusChange(Job job, Status before, Status after) {
-			}
-		};
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/util/UtilTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/util/UtilTest.java b/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/util/UtilTest.java
deleted file mode 100644
index 715157d..0000000
--- a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/util/UtilTest.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package com.nflabs.zeppelin.util;
-
-import junit.framework.TestCase;
-
-public class UtilTest extends TestCase {
-
-	protected void setUp() throws Exception {
-		super.setUp();
-	}
-
-	protected void tearDown() throws Exception {
-		super.tearDown();
-	}
-
-	public void testSplitIncludingToken() {
-		String[] token = Util.split("hello | \"world '>|hehe\" > next >> sink", new String[]{"|", ">>",  ">"}, true);
-		assertEquals(7, token.length);
-		assertEquals(" \"world '>|hehe\" ", token[2]);
-	}
-
-	public void testSplitExcludingToken() {
-		String[] token = Util.split("hello | \"world '>|hehe\" > next >> sink", new String[]{"|", ">>",  ">"}, false);
-		assertEquals(4, token.length);
-		assertEquals(" \"world '>|hehe\" ", token[1]);
-	}
-	
-	public void testSplitWithSemicolonEnd(){
-		String[] token = Util.split("show tables;", ';');
-		assertEquals(1, token.length);
-		assertEquals("show tables", token[0]);
-	}
-	
-	public void testEscapeTemplate(){
-		String[] token = Util.split("select * from <%=table%> limit 1 > output", '>');
-		assertEquals(2, token.length);
-		assertEquals("output", token[1]);
-	}
-
-	public void testSplit(){
-		String [] op = new String[]{";", "|", ">>", ">"};
-		
-		String str = "CREATE external table news20b_train (\n"+
-			"	rowid int,\n"+
-			"   label int,\n"+
-			"   features ARRAY<STRING>\n"+
-			")\n"+ 
-			"ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' \n"+
-			"COLLECTION ITEMS TERMINATED BY \",\" \n"+ 
-			"STORED AS TEXTFILE;\n";
-		Util.split(str, op, true);
-
-	}
-	
-	public void testSplitDifferentBlockStartEnd(){
-		String [] op = new String[]{";", "|", ">>", ">"};
-		String escapeSeq = "\"',;<%>!";
-		char escapeChar = '\\';
-		String [] blockStart = new String[]{ "\"", "'", "<%", "<", "!"};
-		String [] blockEnd = new String[]{ "\"", "'", "%>", ">", ";" };
-		String [] t = Util.split("!echo a;!echo b;", escapeSeq, escapeChar, blockStart, blockEnd, op, true);
-		assertEquals(4, t.length);
-		assertEquals("!echo a;", t[0]);
-		assertEquals(";", t[1]);
-		assertEquals("!echo b;", t[2]);
-		assertEquals(";", t[3]);
-	}
-	
-	public void testNestedBlock(){
-		String [] op = new String[]{";", "|", ">>", ">"};
-		String escapeSeq = "\"',;<%>!";
-		char escapeChar = '\\';
-		String [] blockStart = new String[]{ "\"", "'", "<%", "N_<", "<", "!"};
-		String [] blockEnd = new String[]{ "\"", "'", "%>", "N_>", ";", ";" };
-		String [] t = Util.split("array <STRUCT<STRING>> tags|aa", escapeSeq, escapeChar, blockStart, blockEnd, op, true);
-		assertEquals(3, t.length);
-		assertEquals("array <STRUCT<STRING>> tags", t[0]);
-		assertEquals("aa", t[2]);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/util/UtilsForTests.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/util/UtilsForTests.java b/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/util/UtilsForTests.java
deleted file mode 100644
index dab7138..0000000
--- a/zeppelin-zengine/src/test/java/com/nflabs/zeppelin/util/UtilsForTests.java
+++ /dev/null
@@ -1,104 +0,0 @@
-package com.nflabs.zeppelin.util;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration;
-
-public class UtilsForTests {
-	
-	public static File createTmpDir() throws Exception {
-		File tmpDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis());
-		tmpDir.mkdir();
-		return tmpDir;
-		
-	}
-	/*
-	private static final String HADOOP_DIST="http://apache.mirror.cdnetworks.com/hadoop/common/hadoop-1.2.1/hadoop-1.2.1-bin.tar.gz";
-	//private static final String HADOOP_DIST="http://www.us.apache.org/dist/hadoop/common/hadoop-1.2.1/hadoop-1.2.1-bin.tar.gz";
-	
-	public static void getHadoop() throws MalformedURLException, IOException{
-		setEnv("HADOOP_HOME", new File("./target/hadoop-1.2.1").getAbsolutePath());
-		if(new File("./target/hadoop-1.2.1").isDirectory()) return;
-		//System.out.println("Downloading a hadoop distribution ... it will take a while");
-		//FileUtils.copyURLToFile(new URL(HADOOP_DIST), new File("/tmp/zp_test_hadoop-bin.tar.gz"));
-		System.out.println("Unarchive hadoop distribution ... ");
-		new File("./target").mkdir();
-		Runtime.getRuntime().exec("tar -xzf /tmp/zp_test_hadoop-bin.tar.gz -C ./target");		
-	}
-	*/
-	
-	public static void delete(File file){
-		if(file.isFile()) file.delete();
-		else if(file.isDirectory()){
-			File [] files = file.listFiles();
-			if(files!=null && files.length>0){
-				for(File f : files){
-					delete(f);
-				}
-			}
-			file.delete();
-		}
-	}
-	
-    /**
-     * Utility method to create a file (if does not exist) and populate it the the given content
-     * 
-     * @param path to file
-     * @param content of the file
-     * @throws IOException
-     */
-    public static void createFileWithContent(String path, String content) throws IOException {
-        File f = new File(path);
-        if (!f.exists()) {
-            stringToFile(content, f);
-        }
-    }
-
-	public static void stringToFile(String string, File file) throws IOException{
-		FileOutputStream out = new FileOutputStream(file);
-		out.write(string.getBytes());
-		out.close();
-	}
-
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public static void setEnv(String k, String v) {
-		Map<String, String> newenv = new HashMap<String, String>();
-		newenv.put(k, v);
-	  try {
-	        Class<?> processEnvironmentClass = Class.forName("java.lang.ProcessEnvironment");
-	        Field theEnvironmentField = processEnvironmentClass.getDeclaredField("theEnvironment");
-	        theEnvironmentField.setAccessible(true);
-            Map<String, String> env = (Map<String, String>) theEnvironmentField.get(null);
-	        env.putAll(newenv);
-	        Field theCaseInsensitiveEnvironmentField = processEnvironmentClass.getDeclaredField("theCaseInsensitiveEnvironment");
-	        theCaseInsensitiveEnvironmentField.setAccessible(true);
-	        Map<String, String> cienv = (Map<String, String>)     theCaseInsensitiveEnvironmentField.get(null);
-	        cienv.putAll(newenv);
-	  } catch (NoSuchFieldException e) {
-	      try {
-	        Class[] classes = Collections.class.getDeclaredClasses();
-	        Map<String, String> env = System.getenv();
-	        for(Class cl : classes) {
-	            if("java.util.Collections$UnmodifiableMap".equals(cl.getName())) {
-	                Field field = cl.getDeclaredField("m");
-	                field.setAccessible(true);
-	                Object obj = field.get(env);
-	                Map<String, String> map = (Map<String, String>) obj;
-	                map.clear();
-	                map.putAll(newenv);
-	            }
-	        }
-	      } catch (Exception e2) {
-	        e2.printStackTrace();
-	      }
-	    } catch (Exception e1) {
-	        e1.printStackTrace();
-	    } 
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterFactoryTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterFactoryTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterFactoryTest.java
new file mode 100644
index 0000000..5199300
--- /dev/null
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterFactoryTest.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.zeppelin.conf.ZeppelinConfiguration;
+import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
+import org.apache.zeppelin.interpreter.mock.MockInterpreter1;
+import org.apache.zeppelin.interpreter.mock.MockInterpreter2;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class InterpreterFactoryTest {
+
+	private InterpreterFactory factory;
+  private File tmpDir;
+  private ZeppelinConfiguration conf;
+  private InterpreterContext context;
+
+  @Before
+	public void setUp() throws Exception {
+    tmpDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis());
+    tmpDir.mkdirs();
+    new File(tmpDir, "conf").mkdirs();
+
+	  MockInterpreter1.register("mock1", "org.apache.zeppelin.interpreter.mock.MockInterpreter1");
+	  MockInterpreter2.register("mock2", "org.apache.zeppelin.interpreter.mock.MockInterpreter2");
+
+	  System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), tmpDir.getAbsolutePath());
+	  System.setProperty(ConfVars.ZEPPELIN_INTERPRETERS.getVarName(), "org.apache.zeppelin.interpreter.mock.MockInterpreter1,org.apache.zeppelin.interpreter.mock.MockInterpreter2");
+	  conf = new ZeppelinConfiguration();
+	  factory = new InterpreterFactory(conf, new InterpreterOption(false));
+	  context = new InterpreterContext("id", "title", "text", null, null);
+
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	  delete(tmpDir);
+	}
+
+  private void delete(File file){
+    if(file.isFile()) file.delete();
+    else if(file.isDirectory()){
+      File [] files = file.listFiles();
+      if(files!=null && files.length>0){
+        for(File f : files){
+          delete(f);
+        }
+      }
+      file.delete();
+    }
+  }
+
+	@Test
+	public void testBasic() {
+	  List<String> all = factory.getDefaultInterpreterSettingList();
+
+		// get interpreter
+		Interpreter repl1 = factory.get(all.get(0)).getInterpreterGroup().getFirst();
+		assertFalse(((LazyOpenInterpreter) repl1).isOpen());
+		repl1.interpret("repl1", context);
+		assertTrue(((LazyOpenInterpreter) repl1).isOpen());
+
+		// try to get unavailable interpreter
+		assertNull(factory.get("unknown"));
+
+		// restart interpreter
+		factory.restart(all.get(0));
+		repl1 = factory.get(all.get(0)).getInterpreterGroup().getFirst();
+		assertFalse(((LazyOpenInterpreter) repl1).isOpen());
+	}
+
+  @Test
+  public void testFactoryDefaultList() throws InterpreterException, IOException {
+    // get default list from default setting
+    List<String> all = factory.getDefaultInterpreterSettingList();
+    assertEquals(2, all.size());
+    assertEquals(factory.get(all.get(0)).getInterpreterGroup().getFirst().getClassName(), "org.apache.zeppelin.interpreter.mock.MockInterpreter1");
+
+    // add setting
+    factory.add("a mock", "mock2", new InterpreterOption(false), new Properties());
+    all = factory.getDefaultInterpreterSettingList();
+    assertEquals(2, all.size());
+    assertEquals("mock1", factory.get(all.get(0)).getName());
+    assertEquals("a mock", factory.get(all.get(1)).getName());
+  }
+
+  @Test
+  public void testSaveLoad() throws InterpreterException, IOException {
+    // interpreter settings
+    assertEquals(2, factory.get().size());
+
+    // check if file saved
+    assertTrue(new File(conf.getInterpreterSettingPath()).exists());
+
+    factory.add("newsetting", "mock1", new InterpreterOption(false), new Properties());
+    assertEquals(3, factory.get().size());
+
+    InterpreterFactory factory2 = new InterpreterFactory(conf);
+    assertEquals(3, factory2.get().size());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter1.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter1.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter1.java
new file mode 100644
index 0000000..09259b1
--- /dev/null
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter1.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.mock;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+
+public class MockInterpreter1 extends Interpreter{
+  Map<String, Object> vars = new HashMap<String, Object>();
+
+	public MockInterpreter1(Properties property) {
+		super(property);
+	}
+
+	@Override
+	public void open() {
+	}
+
+	@Override
+	public void close() {
+	}
+
+	@Override
+	public InterpreterResult interpret(String st, InterpreterContext context) {
+		return new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl1: "+st);
+	}
+
+	@Override
+	public void cancel(InterpreterContext context) {
+	}
+
+	@Override
+	public FormType getFormType() {
+		return FormType.SIMPLE;
+	}
+
+	@Override
+	public int getProgress(InterpreterContext context) {
+		return 0;
+	}
+
+	@Override
+	public Scheduler getScheduler() {
+		return SchedulerFactory.singleton().createOrGetFIFOScheduler("test_"+this.hashCode());
+	}
+
+	@Override
+	public List<String> completion(String buf, int cursor) {
+		return null;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter2.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter2.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter2.java
new file mode 100644
index 0000000..dd465a5
--- /dev/null
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter2.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.mock;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+
+public class MockInterpreter2 extends Interpreter{
+  Map<String, Object> vars = new HashMap<String, Object>();
+
+	public MockInterpreter2(Properties property) {
+		super(property);
+	}
+
+	@Override
+	public void open() {
+	}
+
+	@Override
+	public void close() {
+	}
+
+	@Override
+	public InterpreterResult interpret(String st, InterpreterContext context) {
+		return new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl2: "+st);
+	}
+
+	@Override
+	public void cancel(InterpreterContext context) {
+	}
+
+	@Override
+	public FormType getFormType() {
+		return FormType.SIMPLE;
+	}
+
+	@Override
+	public int getProgress(InterpreterContext context) {
+		return 0;
+	}
+
+	@Override
+	public Scheduler getScheduler() {
+		return SchedulerFactory.singleton().createOrGetFIFOScheduler("test_"+this.hashCode());
+	}
+
+	@Override
+	public List<String> completion(String buf, int cursor) {
+		return null;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java
new file mode 100644
index 0000000..88af541
--- /dev/null
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.notebook;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Date;
+import java.util.Map;
+
+import org.apache.zeppelin.conf.ZeppelinConfiguration;
+import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
+import org.apache.zeppelin.interpreter.InterpreterFactory;
+import org.apache.zeppelin.interpreter.InterpreterOption;
+import org.apache.zeppelin.interpreter.mock.MockInterpreter1;
+import org.apache.zeppelin.interpreter.mock.MockInterpreter2;
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.Job.Status;
+import org.apache.zeppelin.scheduler.JobListener;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.quartz.SchedulerException;
+
+public class NotebookTest implements JobListenerFactory{
+
+	private File tmpDir;
+	private ZeppelinConfiguration conf;
+	private SchedulerFactory schedulerFactory;
+	private File notebookDir;
+	private Notebook notebook;
+  private InterpreterFactory factory;
+
+	@Before
+	public void setUp() throws Exception {
+		tmpDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis());
+		tmpDir.mkdirs();
+		new File(tmpDir, "conf").mkdirs();
+		notebookDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis()+"/notebook");
+		notebookDir.mkdirs();
+
+    System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), tmpDir.getAbsolutePath());
+		System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath());
+		System.setProperty(ConfVars.ZEPPELIN_INTERPRETERS.getVarName(), "org.apache.zeppelin.interpreter.mock.MockInterpreter1,org.apache.zeppelin.interpreter.mock.MockInterpreter2");
+
+		conf = ZeppelinConfiguration.create();
+
+		this.schedulerFactory = new SchedulerFactory();
+
+    MockInterpreter1.register("mock1", "org.apache.zeppelin.interpreter.mock.MockInterpreter1");
+    MockInterpreter2.register("mock2", "org.apache.zeppelin.interpreter.mock.MockInterpreter2");
+
+    factory = new InterpreterFactory(conf, new InterpreterOption(false));
+
+		notebook = new Notebook(conf, schedulerFactory, factory, this);
+	}
+
+	@After
+	public void tearDown() throws Exception {
+		delete(tmpDir);
+	}
+
+	@Test
+	public void testSelectingReplImplementation() throws IOException {
+		Note note = notebook.createNote();
+		note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
+
+		// run with default repl
+		Paragraph p1 = note.addParagraph();
+		p1.setText("hello world");
+		note.run(p1.getId());
+		while(p1.isTerminated()==false || p1.getResult()==null) Thread.yield();
+		assertEquals("repl1: hello world", p1.getResult().message());
+
+		// run with specific repl
+		Paragraph p2 = note.addParagraph();
+		p2.setText("%mock2 hello world");
+		note.run(p2.getId());
+		while(p2.isTerminated()==false || p2.getResult()==null) Thread.yield();
+		assertEquals("repl2: hello world", p2.getResult().message());
+	}
+
+	@Test
+	public void testPersist() throws IOException, SchedulerException{
+		Note note = notebook.createNote();
+
+		// run with default repl
+		Paragraph p1 = note.addParagraph();
+		p1.setText("hello world");
+		note.persist();
+
+		Notebook notebook2 = new Notebook(conf, schedulerFactory, new InterpreterFactory(conf), this);
+		assertEquals(1, notebook2.getAllNotes().size());
+	}
+
+	@Test
+	public void testRunAll() throws IOException {
+		Note note = notebook.createNote();
+    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
+
+		Paragraph p1 = note.addParagraph();
+		p1.setText("p1");
+		Paragraph p2 = note.addParagraph();
+		p2.setText("p2");
+		assertEquals(null, p2.getResult());
+		note.runAll();
+
+		while(p2.isTerminated()==false || p2.getResult()==null) Thread.yield();
+		assertEquals("repl1: p2", p2.getResult().message());
+	}
+
+	@Test
+	public void testSchedule() throws InterruptedException, IOException{
+		// create a note and a paragraph
+		Note note = notebook.createNote();
+    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
+
+		Paragraph p = note.addParagraph();
+		p.setText("p1");
+		Date dateFinished = p.getDateFinished();
+		assertNull(dateFinished);
+
+		// set cron scheduler, once a second
+		Map<String, Object> config = note.getConfig();
+		config.put("cron", "* * * * * ?");
+		note.setConfig(config);
+		notebook.refreshCron(note.id());
+		Thread.sleep(1*1000);
+		dateFinished = p.getDateFinished();
+		assertNotNull(dateFinished);
+
+		// remove cron scheduler.
+		config.put("cron", null);
+		note.setConfig(config);
+		notebook.refreshCron(note.id());
+		Thread.sleep(1*1000);
+		assertEquals(dateFinished, p.getDateFinished());
+	}
+
+	private void delete(File file){
+		if(file.isFile()) file.delete();
+		else if(file.isDirectory()){
+			File [] files = file.listFiles();
+			if(files!=null && files.length>0){
+				for(File f : files){
+					delete(f);
+				}
+			}
+			file.delete();
+		}
+	}
+
+	@Override
+	public JobListener getParagraphJobListener(Note note) {
+		return new JobListener(){
+
+			@Override
+			public void onProgressUpdate(Job job, int progress) {
+			}
+
+			@Override
+			public void beforeStatusChange(Job job, Status before, Status after) {
+			}
+
+			@Override
+			public void afterStatusChange(Job job, Status before, Status after) {
+			}
+		};
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilTest.java
new file mode 100644
index 0000000..e96c824
--- /dev/null
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilTest.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.util;
+
+import org.apache.zeppelin.util.Util;
+
+import junit.framework.TestCase;
+
+public class UtilTest extends TestCase {
+
+	@Override
+  protected void setUp() throws Exception {
+		super.setUp();
+	}
+
+	@Override
+  protected void tearDown() throws Exception {
+		super.tearDown();
+	}
+
+	public void testSplitIncludingToken() {
+		String[] token = Util.split("hello | \"world '>|hehe\" > next >> sink", new String[]{"|", ">>",  ">"}, true);
+		assertEquals(7, token.length);
+		assertEquals(" \"world '>|hehe\" ", token[2]);
+	}
+
+	public void testSplitExcludingToken() {
+		String[] token = Util.split("hello | \"world '>|hehe\" > next >> sink", new String[]{"|", ">>",  ">"}, false);
+		assertEquals(4, token.length);
+		assertEquals(" \"world '>|hehe\" ", token[1]);
+	}
+
+	public void testSplitWithSemicolonEnd(){
+		String[] token = Util.split("show tables;", ';');
+		assertEquals(1, token.length);
+		assertEquals("show tables", token[0]);
+	}
+
+	public void testEscapeTemplate(){
+		String[] token = Util.split("select * from <%=table%> limit 1 > output", '>');
+		assertEquals(2, token.length);
+		assertEquals("output", token[1]);
+	}
+
+	public void testSplit(){
+		String [] op = new String[]{";", "|", ">>", ">"};
+
+		String str = "CREATE external table news20b_train (\n"+
+			"	rowid int,\n"+
+			"   label int,\n"+
+			"   features ARRAY<STRING>\n"+
+			")\n"+
+			"ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' \n"+
+			"COLLECTION ITEMS TERMINATED BY \",\" \n"+
+			"STORED AS TEXTFILE;\n";
+		Util.split(str, op, true);
+
+	}
+
+	public void testSplitDifferentBlockStartEnd(){
+		String [] op = new String[]{";", "|", ">>", ">"};
+		String escapeSeq = "\"',;<%>!";
+		char escapeChar = '\\';
+		String [] blockStart = new String[]{ "\"", "'", "<%", "<", "!"};
+		String [] blockEnd = new String[]{ "\"", "'", "%>", ">", ";" };
+		String [] t = Util.split("!echo a;!echo b;", escapeSeq, escapeChar, blockStart, blockEnd, op, true);
+		assertEquals(4, t.length);
+		assertEquals("!echo a;", t[0]);
+		assertEquals(";", t[1]);
+		assertEquals("!echo b;", t[2]);
+		assertEquals(";", t[3]);
+	}
+
+	public void testNestedBlock(){
+		String [] op = new String[]{";", "|", ">>", ">"};
+		String escapeSeq = "\"',;<%>!";
+		char escapeChar = '\\';
+		String [] blockStart = new String[]{ "\"", "'", "<%", "N_<", "<", "!"};
+		String [] blockEnd = new String[]{ "\"", "'", "%>", "N_>", ";", ";" };
+		String [] t = Util.split("array <STRUCT<STRING>> tags|aa", escapeSeq, escapeChar, blockStart, blockEnd, op, true);
+		assertEquals(3, t.length);
+		assertEquals("array <STRUCT<STRING>> tags", t[0]);
+		assertEquals("aa", t[2]);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilsForTests.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilsForTests.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilsForTests.java
new file mode 100644
index 0000000..7700d19
--- /dev/null
+++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilsForTests.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.util;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+public class UtilsForTests {
+
+	public static File createTmpDir() throws Exception {
+		File tmpDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis());
+		tmpDir.mkdir();
+		return tmpDir;
+
+	}
+	/*
+	private static final String HADOOP_DIST="http://apache.mirror.cdnetworks.com/hadoop/common/hadoop-1.2.1/hadoop-1.2.1-bin.tar.gz";
+	//private static final String HADOOP_DIST="http://www.us.apache.org/dist/hadoop/common/hadoop-1.2.1/hadoop-1.2.1-bin.tar.gz";
+
+	public static void getHadoop() throws MalformedURLException, IOException{
+		setEnv("HADOOP_HOME", new File("./target/hadoop-1.2.1").getAbsolutePath());
+		if(new File("./target/hadoop-1.2.1").isDirectory()) return;
+		//System.out.println("Downloading a hadoop distribution ... it will take a while");
+		//FileUtils.copyURLToFile(new URL(HADOOP_DIST), new File("/tmp/zp_test_hadoop-bin.tar.gz"));
+		System.out.println("Unarchive hadoop distribution ... ");
+		new File("./target").mkdir();
+		Runtime.getRuntime().exec("tar -xzf /tmp/zp_test_hadoop-bin.tar.gz -C ./target");
+	}
+	*/
+
+	public static void delete(File file){
+		if(file.isFile()) file.delete();
+		else if(file.isDirectory()){
+			File [] files = file.listFiles();
+			if(files!=null && files.length>0){
+				for(File f : files){
+					delete(f);
+				}
+			}
+			file.delete();
+		}
+	}
+
+    /**
+     * Utility method to create a file (if it does not exist) and populate it with the given content
+     *
+     * @param path to file
+     * @param content of the file
+     * @throws IOException
+     */
+    public static void createFileWithContent(String path, String content) throws IOException {
+        File f = new File(path);
+        if (!f.exists()) {
+            stringToFile(content, f);
+        }
+    }
+
+	public static void stringToFile(String string, File file) throws IOException{
+		FileOutputStream out = new FileOutputStream(file);
+		out.write(string.getBytes());
+		out.close();
+	}
+
+	@SuppressWarnings({ "unchecked", "rawtypes" })
+	public static void setEnv(String k, String v) {
+		Map<String, String> newenv = new HashMap<String, String>();
+		newenv.put(k, v);
+	  try {
+	        Class<?> processEnvironmentClass = Class.forName("java.lang.ProcessEnvironment");
+	        Field theEnvironmentField = processEnvironmentClass.getDeclaredField("theEnvironment");
+	        theEnvironmentField.setAccessible(true);
+            Map<String, String> env = (Map<String, String>) theEnvironmentField.get(null);
+	        env.putAll(newenv);
+	        Field theCaseInsensitiveEnvironmentField = processEnvironmentClass.getDeclaredField("theCaseInsensitiveEnvironment");
+	        theCaseInsensitiveEnvironmentField.setAccessible(true);
+	        Map<String, String> cienv = (Map<String, String>)     theCaseInsensitiveEnvironmentField.get(null);
+	        cienv.putAll(newenv);
+	  } catch (NoSuchFieldException e) {
+	      try {
+	        Class[] classes = Collections.class.getDeclaredClasses();
+	        Map<String, String> env = System.getenv();
+	        for(Class cl : classes) {
+	            if("java.util.Collections$UnmodifiableMap".equals(cl.getName())) {
+	                Field field = cl.getDeclaredField("m");
+	                field.setAccessible(true);
+	                Object obj = field.get(env);
+	                Map<String, String> map = (Map<String, String>) obj;
+	                map.clear();
+	                map.putAll(newenv);
+	            }
+	        }
+	      } catch (Exception e2) {
+	        e2.printStackTrace();
+	      }
+	    } catch (Exception e1) {
+	        e1.printStackTrace();
+	    }
+	}
+}
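
A short usage sketch for these helpers follows; the class name, the ZEPPELIN_HOME variable, the file name, and its content are illustrative only, and setEnv relies on a reflection hack into java.lang.ProcessEnvironment that is expected to work on the JDK 7/8 era runtimes this module targets.

package org.apache.zeppelin.util;

import java.io.File;

public class UtilsForTestsExample {
  public static void main(String[] args) throws Exception {
    // isolated scratch directory for a single test run
    File tmpDir = UtilsForTests.createTmpDir();

    // create a small file inside it (createFileWithContent is a no-op if the file exists)
    File conf = new File(tmpDir, "zeppelin-site.xml");
    UtilsForTests.createFileWithContent(conf.getPath(), "<configuration/>");

    // inject an environment variable into the current JVM's view of the environment
    UtilsForTests.setEnv("ZEPPELIN_HOME", tmpDir.getAbsolutePath());
    System.out.println(System.getenv("ZEPPELIN_HOME"));

    // recursive cleanup
    UtilsForTests.delete(tmpDir);
  }
}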


[04/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/java/com/nflabs/zeppelin/ZeppelinIT.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/java/com/nflabs/zeppelin/ZeppelinIT.java b/zeppelin-server/src/test/java/com/nflabs/zeppelin/ZeppelinIT.java
deleted file mode 100644
index c2208ac..0000000
--- a/zeppelin-server/src/test/java/com/nflabs/zeppelin/ZeppelinIT.java
+++ /dev/null
@@ -1,327 +0,0 @@
-package com.nflabs.zeppelin;
-
-import static org.junit.Assert.fail;
-
-import java.io.File;
-import org.junit.Test;
-import org.openqa.selenium.By;
-import org.openqa.selenium.Keys;
-import org.openqa.selenium.NoSuchElementException;
-import org.openqa.selenium.OutputType;
-import org.openqa.selenium.TakesScreenshot;
-import org.openqa.selenium.TimeoutException;
-import org.openqa.selenium.WebDriver;
-import org.openqa.selenium.WebDriverException;
-import org.openqa.selenium.WebElement;
-import org.openqa.selenium.chrome.ChromeDriver;
-import org.openqa.selenium.firefox.FirefoxBinary;
-import org.openqa.selenium.firefox.FirefoxDriver;
-import org.openqa.selenium.firefox.FirefoxProfile;
-import org.openqa.selenium.safari.SafariDriver;
-import org.openqa.selenium.support.ui.ExpectedCondition;
-import org.openqa.selenium.support.ui.WebDriverWait;
-
-public class ZeppelinIT {
-	private WebDriver getWebDriver(){
-		WebDriver driver = null;
-
-		if (driver==null){
-			try {
-				FirefoxBinary ffox = new FirefoxBinary();
-				if ("true".equals(System.getenv("TRAVIS"))) {
-					ffox.setEnvironmentProperty("DISPLAY", ":99"); // xvfb is supposed to run with DISPLAY 99
-				}
-				FirefoxProfile profile = new FirefoxProfile();
-				driver = new FirefoxDriver(ffox, profile);
-			} catch (Exception e){
-			}
-		}
-
-		if (driver==null){
-			try {
-				driver = new ChromeDriver();
-			} catch (Exception e){
-			}
-		}
-
-		if (driver==null){
-			try {
-				driver = new SafariDriver();
-			} catch (Exception e){
-			}
-		}
-
-		String url;
-		if (System.getProperty("url")!=null) {
-			url = System.getProperty("url");
-		} else {
-			url = "http://localhost:8080";
-		}
-
-		long start = System.currentTimeMillis();
-		boolean loaded = false;
-		driver.get(url);
-
-		while (System.currentTimeMillis() - start < 60*1000) {
-	        // wait for page load
-			try {
-		        (new WebDriverWait(driver, 5)).until(new ExpectedCondition<Boolean>() {
-		            public Boolean apply(WebDriver d) {
-		                return d.findElement(By.partialLinkText("Start")).isDisplayed();
-		            }
-		        });
-		        loaded = true;
-		        break;
-			} catch (TimeoutException e){
-				driver.navigate().to(url);
-			}
-		}
-
-		if (loaded==false) {
-			fail();
-		}
-
-		return driver;
-	}
-
-	@Test
-	public void testDisableIT(){
-		//
-	}
-	
-	/*
-    @Test
-    public void testRunSimpleQueryInNewSession() {
-        // Notice that the remainder of the code relies on the interface,
-        // not the implementation.
-        WebDriver driver = getWebDriver();
-
-        try {
-            // click start
-            WebElement start = driver.findElement(By.partialLinkText("Start"));
-            start.click();
-
-            // Wait for the page to load, timeout after 10 seconds
-            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.linkText("Create new Job")).isDisplayed();
-                }
-            });
-
-            // click new
-            driver.findElement(By.linkText("Create new Job")).click();
-
-            // wait for run button appears
-            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.linkText("Run")).isDisplayed();
-                }
-            });
-
-            // type some query
-            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys("create table if not exists test "+Keys.chord(Keys.SHIFT, "9")+"id STRING);\n");
-            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys("\nshow tables;");
-
-            // press run button
-            driver.findElement(By.linkText("Run")).click();
-
-            // wait for button becomes Running ...
-            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.xpath("//div//a[text()='Running ...']")).isDisplayed();
-                }
-            });
-
-            // wait for button becomes Run
-            (new WebDriverWait(driver, 60)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.xpath("//div//a[text()='Run']")).isDisplayed();
-                }
-            });
-
-            WebElement msg = driver.findElement(By.id("msgBox"));
-            if (msg!=null) {
-            	System.out.println("msgBox="+msg.getText());
-            }
-
-            // wait for visualization
-            (new WebDriverWait(driver, 20)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.xpath("//div[@id='visualizationContainer']//iframe")).isDisplayed();
-                }
-            });
-
-            WebDriver iframe = driver.switchTo().frame(driver.findElement(By.xpath("//div[@id='visualizationContainer']//iframe")));
-
-            // wait for result displayed
-            (new WebDriverWait(iframe, 20)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.xpath("//table//td[text()='test']")).isDisplayed();
-                }
-            });
-        } catch (WebDriverException e){
-            File scrFile = ((TakesScreenshot)driver).getScreenshotAs(OutputType.FILE);
-            System.out.println("Screenshot in: " + scrFile.getAbsolutePath());
-            throw e;
-        } finally {
-            // Close the browser
-            driver.quit();
-        }
-    }
-
-*/
-
-    /**
-     * Get the url of Zeppelin
-     *
-     * @param path to add to the url ex: HOST/myPath
-     * @return Zeppelin url HOST:PORT{/PATH}
-     */
-  private String getUrl(String path) {
-    String url;
-    if (System.getProperty("url") != null) {
-      url = System.getProperty("url");
-    } else {
-      url = "http://localhost:8080";
-    }
-    if (path != null)
-      url += path;
-    return url;
-  }
-
-/*
-    @Test
-	public void testZAN() {
-		WebDriver driver = getWebDriver();
-
-		try {
-			// goto ZAN menu
-			driver.findElement(By.xpath("//ul//a[text()='ZAN']")).click();
-
-			// wait for ZAN page loaded
-			(new WebDriverWait(driver, 20)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.xpath("//div//a[text()='Update Catalog']")).isDisplayed();
-                }
-            });
-		} catch (WebDriverException e) {
-			File scrFile = ((TakesScreenshot) driver)
-					.getScreenshotAs(OutputType.FILE);
-			System.out.println("Screenshot in: " + scrFile.getAbsolutePath());
-			throw e;
-		} finally {
-			// Close the browser
-			driver.quit();
-		}
-	}
-*/
-
-
-  /**
-   * Test is swagger-ui is started
-   */
-    /*
-  @Test
-  public void testSwaggerDocumentation() {
-    WebDriver driver = getWebDriver();
-    try {
-
-      driver.get(getUrl("/docs"));
-      // wait for Swagger page loaded
-      (new WebDriverWait(driver, 20)).until(new ExpectedCondition<Boolean>() {
-        public Boolean apply(WebDriver d) {
-          return d.findElement(By.xpath("//div//input[@id='input_apiKey']")).isDisplayed();
-        }
-      });
-
-    } catch (WebDriverException ex) {
-      File scrFile = ((TakesScreenshot) driver).getScreenshotAs(OutputType.FILE);
-      System.out.println("Screenshot in: " + scrFile.getAbsolutePath());
-      throw ex;
-    } finally {
-      driver.close();
-    }
-  }
-
-    @Test
-	public void testAnnotationStmt() {
-        // Notice that the remainder of the code relies on the interface,
-        // not the implementation.
-        WebDriver driver = getWebDriver();
-
-        try {
-            // click start
-            WebElement start = driver.findElement(By.partialLinkText("Start"));
-            start.click();
-
-            // Wait for the page to load, timeout after 10 seconds
-            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.linkText("Create new Job")).isDisplayed();
-                }
-            });
-
-            // click new
-            driver.findElement(By.linkText("Create new Job")).click();
-
-            // wait for run button appears
-            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.linkText("Run")).isDisplayed();
-                }
-            });
-
-            // type some query with default driver
-            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys("@driver set exec;");
-            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys("\necho 'hello world';");
-
-            // press run button
-            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys(Keys.chord(Keys.COMMAND, Keys.ENTER));
-            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys(Keys.chord(Keys.CONTROL, Keys.ENTER));
-            driver.findElement(By.linkText("Run")).click();
-
-            // wait for button becomes Running ...
-            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.xpath("//div//a[text()='Running ...']")).isDisplayed();
-                }
-            });
-
-            // wait for button becomes Run
-            (new WebDriverWait(driver, 60)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.xpath("//div//a[text()='Run']")).isDisplayed();
-                }
-            });
-
-            WebElement msg = driver.findElement(By.id("msgBox"));
-            if (msg!=null) {
-            	System.out.println("msgBox="+msg.getText());
-            }
-
-            // wait for visualization
-            (new WebDriverWait(driver, 20)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.xpath("//div[@id='visualizationContainer']//iframe")).isDisplayed();
-                }
-            });
-
-            WebDriver iframe = driver.switchTo().frame(driver.findElement(By.xpath("//div[@id='visualizationContainer']//iframe")));
-
-            // wait for result displayed
-            (new WebDriverWait(iframe, 20)).until(new ExpectedCondition<Boolean>() {
-                public Boolean apply(WebDriver d) {
-                    return d.findElement(By.xpath("//table//td[text()='hello world']")).isDisplayed();
-                }
-            });
-        } catch (WebDriverException e){
-            File scrFile = ((TakesScreenshot)driver).getScreenshotAs(OutputType.FILE);
-            System.out.println("Screenshot in: " + scrFile.getAbsolutePath());
-            throw e;
-        } finally {
-            // Close the browser
-            driver.quit();
-        }
-	}
-*/	
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/java/com/nflabs/zeppelin/rest/AbstractTestRestApi.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/java/com/nflabs/zeppelin/rest/AbstractTestRestApi.java b/zeppelin-server/src/test/java/com/nflabs/zeppelin/rest/AbstractTestRestApi.java
deleted file mode 100644
index f7a038c..0000000
--- a/zeppelin-server/src/test/java/com/nflabs/zeppelin/rest/AbstractTestRestApi.java
+++ /dev/null
@@ -1,239 +0,0 @@
-package com.nflabs.zeppelin.rest;
-
-import java.io.IOException;
-import java.lang.ref.WeakReference;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.methods.PostMethod;
-import org.apache.commons.httpclient.methods.RequestEntity;
-import org.hamcrest.Description;
-import org.hamcrest.Matcher;
-import org.hamcrest.TypeSafeMatcher;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.JsonElement;
-import com.google.gson.JsonParseException;
-import com.google.gson.JsonParser;
-import com.nflabs.zeppelin.server.ZeppelinServer;
-
-public abstract class AbstractTestRestApi {
-
-  protected static final Logger LOG = LoggerFactory.getLogger(AbstractTestRestApi.class);
-
-  static final String restApiUrl = "/api";
-  static final String url = getUrlToTest();
-  protected static final boolean wasRunning = checkIfServerIsRuning();
-
-  private String getUrl(String path) {
-    String url;
-    if (System.getProperty("url") != null) {
-      url = System.getProperty("url");
-    } else {
-      url = "http://localhost:8080";
-    }
-    url += restApiUrl;
-    if (path != null)
-      url += path;
-    return url;
-  }
-
-  protected static String getUrlToTest() {
-    String url = "http://localhost:8080" + restApiUrl;
-    if (System.getProperty("url") != null) {
-      url = System.getProperty("url");
-    }
-    return url;
-  }
-
-  static ExecutorService executor = Executors.newSingleThreadExecutor();
-  protected static final Runnable server = new Runnable() {
-    @Override
-    public void run() {
-      try {
-        ZeppelinServer.main(new String[] {""});
-      } catch (Exception e) {
-        e.printStackTrace();
-        throw new RuntimeException(e);
-      }
-    }
-  };
-
-  protected static void startUp() throws Exception {
-    if (!wasRunning) {
-      LOG.info("Staring test Zeppelin up...");
-      executor.submit(server);
-      long s = System.currentTimeMillis();
-      boolean started = false;
-      while (System.currentTimeMillis() - s < 1000 * 60 * 3) {  // 3 minutes
-    	  Thread.sleep(2000);
-    	  started = checkIfServerIsRuning();
-    	  if (started == true) {
-    		  break;
-    	  }
-      }
-      if (started == false) {
-    	  throw new RuntimeException("Can not start Zeppelin server");
-      }
-      LOG.info("Test Zeppelin stared.");
-    }
-  }
-
-  protected static void shutDown() {
-    if (!wasRunning) {
-      LOG.info("Terminating test Zeppelin...");
-      executor.shutdown();
-      try {
-        executor.awaitTermination(10, TimeUnit.SECONDS);
-      } catch (InterruptedException e) {
-        // TODO Auto-generated catch block
-        e.printStackTrace();
-      }
-      LOG.info("Test Zeppelin terminated.");
-    }
-  }
-
-  protected static boolean checkIfServerIsRuning() {
-    GetMethod request = null;
-    boolean isRunning = true;
-    try {
-      request = httpGet("/");
-      isRunning = request.getStatusCode() == 200;
-    } catch (IOException e) {
-      isRunning = false;
-    } finally {
-      if (request != null) {
-        request.releaseConnection();
-      }
-    }
-    return isRunning;
-  }
-
-  protected static GetMethod httpGet(String path) throws IOException {
-    LOG.info("Connecting to {}", url + path);
-    HttpClient httpClient = new HttpClient();
-    GetMethod getMethod = new GetMethod(url + path);
-    httpClient.executeMethod(getMethod);
-    LOG.info("{} - {}", getMethod.getStatusCode(), getMethod.getStatusText());
-    return getMethod;
-  }
-
-  protected static PostMethod httpPost(String path, String body) throws IOException {
-    LOG.info("Connecting to {}", url + path);
-    HttpClient httpClient = new HttpClient();
-    PostMethod postMethod = new PostMethod(url + path);
-    RequestEntity entity = new ByteArrayRequestEntity(body.getBytes("UTF-8"));
-    postMethod.setRequestEntity(entity);
-    httpClient.executeMethod(postMethod);
-    LOG.info("{} - {}", postMethod.getStatusCode(), postMethod.getStatusText());
-    return postMethod;
-  }
-
-  protected Matcher<GetMethod> responsesWith(final int expectedStatusCode) {
-    return new TypeSafeMatcher<GetMethod>() {
-      WeakReference<GetMethod> method;
-
-      @Override
-      public boolean matchesSafely(GetMethod getMethod) {
-        method = (method == null) ? new WeakReference<GetMethod>(getMethod) : method;
-        return getMethod.getStatusCode() == expectedStatusCode;
-      }
-
-      @Override
-      public void describeTo(Description description) {
-        description.appendText("HTTP response ").appendValue(expectedStatusCode)
-            .appendText(" from ").appendText(method.get().getPath());
-      }
-
-      @Override
-      protected void describeMismatchSafely(GetMethod item, Description description) {
-        description.appendText("got ").appendValue(item.getStatusCode()).appendText(" ")
-            .appendText(item.getStatusText());
-      }
-    };
-  }
-
-  protected TypeSafeMatcher<String> isJSON() {
-    return new TypeSafeMatcher<String>() {
-      @Override
-      public boolean matchesSafely(String body) {
-        String b = body.trim();
-        return (b.startsWith("{") && b.endsWith("}")) || (b.startsWith("[") && b.endsWith("]"));
-      }
-
-      @Override
-      public void describeTo(Description description) {
-        description.appendText("response in JSON format ");
-      }
-
-      @Override
-      protected void describeMismatchSafely(String item, Description description) {
-        description.appendText("got ").appendText(item);
-      }
-    };
-  }
-
-  protected TypeSafeMatcher<String> isValidJSON() {
-    return new TypeSafeMatcher<String>() {
-      @Override
-      public boolean matchesSafely(String body) {
-        boolean isValid = true;
-        try {
-          new JsonParser().parse(body);
-        } catch (JsonParseException e) {
-          isValid = false;
-        }
-        return isValid;
-      }
-
-      @Override
-      public void describeTo(Description description) {
-        description.appendText("response in JSON format ");
-      }
-
-      @Override
-      protected void describeMismatchSafely(String item, Description description) {
-        description.appendText("got ").appendText(item);
-      }
-    };
-  }
-
-  protected TypeSafeMatcher<? super JsonElement> hasRootElementNamed(final String memberName) {
-    return new TypeSafeMatcher<JsonElement>() {
-      @Override
-      protected boolean matchesSafely(JsonElement item) {
-        return item.isJsonObject() && item.getAsJsonObject().has(memberName);
-      }
-
-      @Override
-      public void describeTo(Description description) {
-        description.appendText("response in JSON format with \"").appendText(memberName)
-            .appendText("\" beeing a root element ");
-      }
-
-      @Override
-      protected void describeMismatchSafely(JsonElement root, Description description) {
-        description.appendText("got ").appendText(root.toString());
-      }
-    };
-  }
-
-  /** Status code matcher */
-  protected Matcher<? super GetMethod> isForbiden() {
-    return responsesWith(403);
-  }
-
-  protected Matcher<? super GetMethod> isAllowed() {
-    return responsesWith(200);
-  }
-
-  protected Matcher<? super GetMethod> isNotAllowed() {
-    return responsesWith(405);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/java/com/nflabs/zeppelin/rest/ZeppelinRestApiTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/java/com/nflabs/zeppelin/rest/ZeppelinRestApiTest.java b/zeppelin-server/src/test/java/com/nflabs/zeppelin/rest/ZeppelinRestApiTest.java
deleted file mode 100644
index 9ccabcd..0000000
--- a/zeppelin-server/src/test/java/com/nflabs/zeppelin/rest/ZeppelinRestApiTest.java
+++ /dev/null
@@ -1,95 +0,0 @@
-package com.nflabs.zeppelin.rest;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.nflabs.zeppelin.notebook.Note;
-import com.nflabs.zeppelin.server.ZeppelinServer;
-/**
- * BASIC Zeppelin rest api tests
- * TODO: Add Post,Put,Delete test and method
- *
- * @author anthonycorbacho
- *
- */
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class ZeppelinRestApiTest extends AbstractTestRestApi {
-  Gson gson = new Gson();
-
-  @BeforeClass
-  public static void init() throws Exception {
-    AbstractTestRestApi.startUp();
-  }
-
-  @AfterClass
-  public static void destroy() {
-    AbstractTestRestApi.shutDown();
-  }
-
-  /***
-   * ROOT API TEST
-   ***/
-  @Test
-  public void getApiRoot() throws IOException {
-    // when
-    GetMethod httpGetRoot = httpGet("/");
-    // then
-    assertThat(httpGetRoot, isAllowed());
-    httpGetRoot.releaseConnection();
-  }
-
-
-  @Test
-  public void getAvailableInterpreters() throws IOException {
-    // when
-    GetMethod get = httpGet("/interpreter");
-
-    // then
-    assertThat(get, isAllowed());
-    Map<String, Object> resp = gson.fromJson(get.getResponseBodyAsString(), new TypeToken<Map<String, Object>>(){}.getType());
-    Map<String, Object> body = (Map<String, Object>) resp.get("body");
-    assertEquals(6, body.size());
-    get.releaseConnection();
-  }
-
-  @Test
-  public void getSettings() throws IOException {
-    // when
-    GetMethod get = httpGet("/interpreter/setting");
-
-    // then
-    Map<String, Object> resp = gson.fromJson(get.getResponseBodyAsString(), new TypeToken<Map<String, Object>>(){}.getType());
-    assertThat(get, isAllowed());
-    get.releaseConnection();
-  }
-
-
-  @Test
-  public void testInterpreterAutoBinding() throws IOException {
-    // create note
-    Note note = ZeppelinServer.notebook.createNote();
-
-    // check interpreter is bindded
-    GetMethod get = httpGet("/notebook/interpreter/bind/"+note.id());
-    assertThat(get, isAllowed());
-    Map<String, Object> resp = gson.fromJson(get.getResponseBodyAsString(), new TypeToken<Map<String, Object>>(){}.getType());
-    List<Map<String, String>> body = (List<Map<String, String>>) resp.get("body");
-    assertTrue(0 < body.size());
-
-    get.releaseConnection();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/java/com/webautomation/ScreenCaptureHtmlUnitDriver.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/java/com/webautomation/ScreenCaptureHtmlUnitDriver.java b/zeppelin-server/src/test/java/com/webautomation/ScreenCaptureHtmlUnitDriver.java
index 473cfeb..ae83bee 100644
--- a/zeppelin-server/src/test/java/com/webautomation/ScreenCaptureHtmlUnitDriver.java
+++ b/zeppelin-server/src/test/java/com/webautomation/ScreenCaptureHtmlUnitDriver.java
@@ -42,7 +42,7 @@ public class ScreenCaptureHtmlUnitDriver extends HtmlUnitDriver implements Takes
     private static Map<String, byte[]> imagesCache = Collections.synchronizedMap(new HashMap<String, byte[]>());
 
     private static Map<String, String> cssjsCache = Collections.synchronizedMap(new HashMap<String, String>());
-    
+
     // http://stackoverflow.com/questions/4652777/java-regex-to-get-the-urls-from-css
     private final static Pattern cssUrlPattern = Pattern.compile("background(-image)?[\\s]*:[^url]*url[\\s]*\\([\\s]*([^\\)]*)[\\s]*\\)[\\s]*");// ?<url>
 
@@ -65,6 +65,7 @@ public class ScreenCaptureHtmlUnitDriver extends HtmlUnitDriver implements Takes
     }
 
     //@Override
+    @Override
     @SuppressWarnings("unchecked")
     public <X> X getScreenshotAs(OutputType<X> target) throws WebDriverException {
         byte[] archive = new byte[0];

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/java/org/apache/zeppelin/ZeppelinIT.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/ZeppelinIT.java b/zeppelin-server/src/test/java/org/apache/zeppelin/ZeppelinIT.java
new file mode 100644
index 0000000..08d3238
--- /dev/null
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/ZeppelinIT.java
@@ -0,0 +1,338 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin;
+
+import static org.junit.Assert.fail;
+
+import org.junit.Test;
+import org.openqa.selenium.By;
+import org.openqa.selenium.TimeoutException;
+import org.openqa.selenium.WebDriver;
+import org.openqa.selenium.chrome.ChromeDriver;
+import org.openqa.selenium.firefox.FirefoxBinary;
+import org.openqa.selenium.firefox.FirefoxDriver;
+import org.openqa.selenium.firefox.FirefoxProfile;
+import org.openqa.selenium.safari.SafariDriver;
+import org.openqa.selenium.support.ui.ExpectedCondition;
+import org.openqa.selenium.support.ui.WebDriverWait;
+
+public class ZeppelinIT {
+	private WebDriver getWebDriver(){
+		WebDriver driver = null;
+
+		if (driver==null){
+			try {
+				FirefoxBinary ffox = new FirefoxBinary();
+				if ("true".equals(System.getenv("TRAVIS"))) {
+					ffox.setEnvironmentProperty("DISPLAY", ":99"); // xvfb is supposed to run with DISPLAY 99
+				}
+				FirefoxProfile profile = new FirefoxProfile();
+				driver = new FirefoxDriver(ffox, profile);
+			} catch (Exception e){
+			}
+		}
+
+		if (driver==null){
+			try {
+				driver = new ChromeDriver();
+			} catch (Exception e){
+			}
+		}
+
+		if (driver==null){
+			try {
+				driver = new SafariDriver();
+			} catch (Exception e){
+			}
+		}
+
+		String url;
+		if (System.getProperty("url")!=null) {
+			url = System.getProperty("url");
+		} else {
+			url = "http://localhost:8080";
+		}
+
+		long start = System.currentTimeMillis();
+		boolean loaded = false;
+		driver.get(url);
+
+		while (System.currentTimeMillis() - start < 60*1000) {
+	        // wait for page load
+			try {
+		        (new WebDriverWait(driver, 5)).until(new ExpectedCondition<Boolean>() {
+		            @Override
+                public Boolean apply(WebDriver d) {
+		                return d.findElement(By.partialLinkText("Start")).isDisplayed();
+		            }
+		        });
+		        loaded = true;
+		        break;
+			} catch (TimeoutException e){
+				driver.navigate().to(url);
+			}
+		}
+
+		if (loaded==false) {
+			fail();
+		}
+
+		return driver;
+	}
+
+	@Test
+	public void testDisableIT(){
+		//
+	}
+
+	/*
+    @Test
+    public void testRunSimpleQueryInNewSession() {
+        // Notice that the remainder of the code relies on the interface,
+        // not the implementation.
+        WebDriver driver = getWebDriver();
+
+        try {
+            // click start
+            WebElement start = driver.findElement(By.partialLinkText("Start"));
+            start.click();
+
+            // Wait for the page to load, timeout after 10 seconds
+            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.linkText("Create new Job")).isDisplayed();
+                }
+            });
+
+            // click new
+            driver.findElement(By.linkText("Create new Job")).click();
+
+            // wait for run button appears
+            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.linkText("Run")).isDisplayed();
+                }
+            });
+
+            // type some query
+            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys("create table if not exists test "+Keys.chord(Keys.SHIFT, "9")+"id STRING);\n");
+            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys("\nshow tables;");
+
+            // press run button
+            driver.findElement(By.linkText("Run")).click();
+
+            // wait for button becomes Running ...
+            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.xpath("//div//a[text()='Running ...']")).isDisplayed();
+                }
+            });
+
+            // wait for button becomes Run
+            (new WebDriverWait(driver, 60)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.xpath("//div//a[text()='Run']")).isDisplayed();
+                }
+            });
+
+            WebElement msg = driver.findElement(By.id("msgBox"));
+            if (msg!=null) {
+            	System.out.println("msgBox="+msg.getText());
+            }
+
+            // wait for visualization
+            (new WebDriverWait(driver, 20)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.xpath("//div[@id='visualizationContainer']//iframe")).isDisplayed();
+                }
+            });
+
+            WebDriver iframe = driver.switchTo().frame(driver.findElement(By.xpath("//div[@id='visualizationContainer']//iframe")));
+
+            // wait for result displayed
+            (new WebDriverWait(iframe, 20)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.xpath("//table//td[text()='test']")).isDisplayed();
+                }
+            });
+        } catch (WebDriverException e){
+            File scrFile = ((TakesScreenshot)driver).getScreenshotAs(OutputType.FILE);
+            System.out.println("Screenshot in: " + scrFile.getAbsolutePath());
+            throw e;
+        } finally {
+            // Close the browser
+            driver.quit();
+        }
+    }
+
+*/
+
+    /**
+     * Get the url of Zeppelin
+     *
+     * @param path to add to the url ex: HOST/myPath
+     * @return Zeppelin url HOST:PORT{/PATH}
+     */
+  private String getUrl(String path) {
+    String url;
+    if (System.getProperty("url") != null) {
+      url = System.getProperty("url");
+    } else {
+      url = "http://localhost:8080";
+    }
+    if (path != null)
+      url += path;
+    return url;
+  }
+
+/*
+    @Test
+	public void testZAN() {
+		WebDriver driver = getWebDriver();
+
+		try {
+			// goto ZAN menu
+			driver.findElement(By.xpath("//ul//a[text()='ZAN']")).click();
+
+			// wait for ZAN page loaded
+			(new WebDriverWait(driver, 20)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.xpath("//div//a[text()='Update Catalog']")).isDisplayed();
+                }
+            });
+		} catch (WebDriverException e) {
+			File scrFile = ((TakesScreenshot) driver)
+					.getScreenshotAs(OutputType.FILE);
+			System.out.println("Screenshot in: " + scrFile.getAbsolutePath());
+			throw e;
+		} finally {
+			// Close the browser
+			driver.quit();
+		}
+	}
+*/
+
+
+  /**
+   * Test whether swagger-ui is started
+   */
+    /*
+  @Test
+  public void testSwaggerDocumentation() {
+    WebDriver driver = getWebDriver();
+    try {
+
+      driver.get(getUrl("/docs"));
+      // wait for Swagger page loaded
+      (new WebDriverWait(driver, 20)).until(new ExpectedCondition<Boolean>() {
+        public Boolean apply(WebDriver d) {
+          return d.findElement(By.xpath("//div//input[@id='input_apiKey']")).isDisplayed();
+        }
+      });
+
+    } catch (WebDriverException ex) {
+      File scrFile = ((TakesScreenshot) driver).getScreenshotAs(OutputType.FILE);
+      System.out.println("Screenshot in: " + scrFile.getAbsolutePath());
+      throw ex;
+    } finally {
+      driver.close();
+    }
+  }
+
+    @Test
+	public void testAnnotationStmt() {
+        // Notice that the remainder of the code relies on the interface,
+        // not the implementation.
+        WebDriver driver = getWebDriver();
+
+        try {
+            // click start
+            WebElement start = driver.findElement(By.partialLinkText("Start"));
+            start.click();
+
+            // Wait for the page to load, timeout after 10 seconds
+            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.linkText("Create new Job")).isDisplayed();
+                }
+            });
+
+            // click new
+            driver.findElement(By.linkText("Create new Job")).click();
+
+            // wait for run button appears
+            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.linkText("Run")).isDisplayed();
+                }
+            });
+
+            // type some query with default driver
+            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys("@driver set exec;");
+            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys("\necho 'hello world';");
+
+            // press run button
+            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys(Keys.chord(Keys.COMMAND, Keys.ENTER));
+            driver.findElement(By.xpath("//div[@id='zqlEditor']//textarea")).sendKeys(Keys.chord(Keys.CONTROL, Keys.ENTER));
+            driver.findElement(By.linkText("Run")).click();
+
+            // wait for button becomes Running ...
+            (new WebDriverWait(driver, 10)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.xpath("//div//a[text()='Running ...']")).isDisplayed();
+                }
+            });
+
+            // wait for button becomes Run
+            (new WebDriverWait(driver, 60)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.xpath("//div//a[text()='Run']")).isDisplayed();
+                }
+            });
+
+            WebElement msg = driver.findElement(By.id("msgBox"));
+            if (msg!=null) {
+            	System.out.println("msgBox="+msg.getText());
+            }
+
+            // wait for visualization
+            (new WebDriverWait(driver, 20)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.xpath("//div[@id='visualizationContainer']//iframe")).isDisplayed();
+                }
+            });
+
+            WebDriver iframe = driver.switchTo().frame(driver.findElement(By.xpath("//div[@id='visualizationContainer']//iframe")));
+
+            // wait for result displayed
+            (new WebDriverWait(iframe, 20)).until(new ExpectedCondition<Boolean>() {
+                public Boolean apply(WebDriver d) {
+                    return d.findElement(By.xpath("//table//td[text()='hello world']")).isDisplayed();
+                }
+            });
+        } catch (WebDriverException e){
+            File scrFile = ((TakesScreenshot)driver).getScreenshotAs(OutputType.FILE);
+            System.out.println("Screenshot in: " + scrFile.getAbsolutePath());
+            throw e;
+        } finally {
+            // Close the browser
+            driver.quit();
+        }
+	}
+*/
+}
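
The explicit-wait idiom repeated throughout this test (a WebDriverWait driving an anonymous ExpectedCondition) can be factored into a single helper. A small sketch follows; the class name, locator, and timeout are illustrative.

package org.apache.zeppelin;

import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.WebDriverWait;

public class WaitHelper {
  /** Blocks until the element located by 'locator' is displayed, or the wait times out. */
  public static void waitForVisible(WebDriver driver, final By locator, long timeoutSeconds) {
    (new WebDriverWait(driver, timeoutSeconds)).until(new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver d) {
        return d.findElement(locator).isDisplayed();
      }
    });
  }
}

With such a helper, the page-load check inside getWebDriver() reduces to a call like WaitHelper.waitForVisible(driver, By.partialLinkText("Start"), 5) inside the retry loop.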

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
new file mode 100644
index 0000000..ac40dda
--- /dev/null
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
@@ -0,0 +1,256 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.rest;
+
+import java.io.IOException;
+import java.lang.ref.WeakReference;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.httpclient.HttpClient;
+import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
+import org.apache.commons.httpclient.methods.GetMethod;
+import org.apache.commons.httpclient.methods.PostMethod;
+import org.apache.commons.httpclient.methods.RequestEntity;
+import org.apache.zeppelin.server.ZeppelinServer;
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeMatcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonParser;
+
+public abstract class AbstractTestRestApi {
+
+  protected static final Logger LOG = LoggerFactory.getLogger(AbstractTestRestApi.class);
+
+  static final String restApiUrl = "/api";
+  static final String url = getUrlToTest();
+  protected static final boolean wasRunning = checkIfServerIsRuning();
+
+  private String getUrl(String path) {
+    String url;
+    if (System.getProperty("url") != null) {
+      url = System.getProperty("url");
+    } else {
+      url = "http://localhost:8080";
+    }
+    url += restApiUrl;
+    if (path != null)
+      url += path;
+    return url;
+  }
+
+  protected static String getUrlToTest() {
+    String url = "http://localhost:8080" + restApiUrl;
+    if (System.getProperty("url") != null) {
+      url = System.getProperty("url");
+    }
+    return url;
+  }
+
+  static ExecutorService executor = Executors.newSingleThreadExecutor();
+  protected static final Runnable server = new Runnable() {
+    @Override
+    public void run() {
+      try {
+        ZeppelinServer.main(new String[] {""});
+      } catch (Exception e) {
+        e.printStackTrace();
+        throw new RuntimeException(e);
+      }
+    }
+  };
+
+  protected static void startUp() throws Exception {
+    if (!wasRunning) {
+      LOG.info("Starting test Zeppelin up...");
+      executor.submit(server);
+      long s = System.currentTimeMillis();
+      boolean started = false;
+      while (System.currentTimeMillis() - s < 1000 * 60 * 3) {  // 3 minutes
+    	  Thread.sleep(2000);
+    	  started = checkIfServerIsRuning();
+    	  if (started == true) {
+    		  break;
+    	  }
+      }
+      if (started == false) {
+    	  throw new RuntimeException("Can not start Zeppelin server");
+      }
+      LOG.info("Test Zeppelin started.");
+    }
+  }
+
+  protected static void shutDown() {
+    if (!wasRunning) {
+      LOG.info("Terminating test Zeppelin...");
+      executor.shutdown();
+      try {
+        executor.awaitTermination(10, TimeUnit.SECONDS);
+      } catch (InterruptedException e) {
+        // TODO Auto-generated catch block
+        e.printStackTrace();
+      }
+      LOG.info("Test Zeppelin terminated.");
+    }
+  }
+
+  protected static boolean checkIfServerIsRuning() {
+    GetMethod request = null;
+    boolean isRunning = true;
+    try {
+      request = httpGet("/");
+      isRunning = request.getStatusCode() == 200;
+    } catch (IOException e) {
+      isRunning = false;
+    } finally {
+      if (request != null) {
+        request.releaseConnection();
+      }
+    }
+    return isRunning;
+  }
+
+  protected static GetMethod httpGet(String path) throws IOException {
+    LOG.info("Connecting to {}", url + path);
+    HttpClient httpClient = new HttpClient();
+    GetMethod getMethod = new GetMethod(url + path);
+    httpClient.executeMethod(getMethod);
+    LOG.info("{} - {}", getMethod.getStatusCode(), getMethod.getStatusText());
+    return getMethod;
+  }
+
+  protected static PostMethod httpPost(String path, String body) throws IOException {
+    LOG.info("Connecting to {}", url + path);
+    HttpClient httpClient = new HttpClient();
+    PostMethod postMethod = new PostMethod(url + path);
+    RequestEntity entity = new ByteArrayRequestEntity(body.getBytes("UTF-8"));
+    postMethod.setRequestEntity(entity);
+    httpClient.executeMethod(postMethod);
+    LOG.info("{} - {}", postMethod.getStatusCode(), postMethod.getStatusText());
+    return postMethod;
+  }
+
+  protected Matcher<GetMethod> responsesWith(final int expectedStatusCode) {
+    return new TypeSafeMatcher<GetMethod>() {
+      WeakReference<GetMethod> method;
+
+      @Override
+      public boolean matchesSafely(GetMethod getMethod) {
+        method = (method == null) ? new WeakReference<GetMethod>(getMethod) : method;
+        return getMethod.getStatusCode() == expectedStatusCode;
+      }
+
+      @Override
+      public void describeTo(Description description) {
+        description.appendText("HTTP response ").appendValue(expectedStatusCode)
+            .appendText(" from ").appendText(method.get().getPath());
+      }
+
+      @Override
+      protected void describeMismatchSafely(GetMethod item, Description description) {
+        description.appendText("got ").appendValue(item.getStatusCode()).appendText(" ")
+            .appendText(item.getStatusText());
+      }
+    };
+  }
+
+  protected TypeSafeMatcher<String> isJSON() {
+    return new TypeSafeMatcher<String>() {
+      @Override
+      public boolean matchesSafely(String body) {
+        String b = body.trim();
+        return (b.startsWith("{") && b.endsWith("}")) || (b.startsWith("[") && b.endsWith("]"));
+      }
+
+      @Override
+      public void describeTo(Description description) {
+        description.appendText("response in JSON format ");
+      }
+
+      @Override
+      protected void describeMismatchSafely(String item, Description description) {
+        description.appendText("got ").appendText(item);
+      }
+    };
+  }
+
+  protected TypeSafeMatcher<String> isValidJSON() {
+    return new TypeSafeMatcher<String>() {
+      @Override
+      public boolean matchesSafely(String body) {
+        boolean isValid = true;
+        try {
+          new JsonParser().parse(body);
+        } catch (JsonParseException e) {
+          isValid = false;
+        }
+        return isValid;
+      }
+
+      @Override
+      public void describeTo(Description description) {
+        description.appendText("response in JSON format ");
+      }
+
+      @Override
+      protected void describeMismatchSafely(String item, Description description) {
+        description.appendText("got ").appendText(item);
+      }
+    };
+  }
+
+  protected TypeSafeMatcher<? super JsonElement> hasRootElementNamed(final String memberName) {
+    return new TypeSafeMatcher<JsonElement>() {
+      @Override
+      protected boolean matchesSafely(JsonElement item) {
+        return item.isJsonObject() && item.getAsJsonObject().has(memberName);
+      }
+
+      @Override
+      public void describeTo(Description description) {
+        description.appendText("response in JSON format with \"").appendText(memberName)
+            .appendText("\" being a root element ");
+      }
+
+      @Override
+      protected void describeMismatchSafely(JsonElement root, Description description) {
+        description.appendText("got ").appendText(root.toString());
+      }
+    };
+  }
+
+  /** Status code matcher */
+  protected Matcher<? super GetMethod> isForbiden() {
+    return responsesWith(403);
+  }
+
+  protected Matcher<? super GetMethod> isAllowed() {
+    return responsesWith(200);
+  }
+
+  protected Matcher<? super GetMethod> isNotAllowed() {
+    return responsesWith(405);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
new file mode 100644
index 0000000..3e63503
--- /dev/null
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.rest;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.httpclient.methods.GetMethod;
+import org.apache.zeppelin.notebook.Note;
+import org.apache.zeppelin.server.ZeppelinServer;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+/**
+ * Basic Zeppelin REST API tests.
+ * TODO: add POST, PUT, and DELETE tests and methods.
+ *
+ * @author anthonycorbacho
+ *
+ */
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class ZeppelinRestApiTest extends AbstractTestRestApi {
+  Gson gson = new Gson();
+
+  @BeforeClass
+  public static void init() throws Exception {
+    AbstractTestRestApi.startUp();
+  }
+
+  @AfterClass
+  public static void destroy() {
+    AbstractTestRestApi.shutDown();
+  }
+
+  /***
+   * ROOT API TEST
+   ***/
+  @Test
+  public void getApiRoot() throws IOException {
+    // when
+    GetMethod httpGetRoot = httpGet("/");
+    // then
+    assertThat(httpGetRoot, isAllowed());
+    httpGetRoot.releaseConnection();
+  }
+
+
+  @Test
+  public void getAvailableInterpreters() throws IOException {
+    // when
+    GetMethod get = httpGet("/interpreter");
+
+    // then
+    assertThat(get, isAllowed());
+    Map<String, Object> resp = gson.fromJson(get.getResponseBodyAsString(), new TypeToken<Map<String, Object>>(){}.getType());
+    Map<String, Object> body = (Map<String, Object>) resp.get("body");
+    assertEquals(6, body.size());
+    get.releaseConnection();
+  }
+
+  @Test
+  public void getSettings() throws IOException {
+    // when
+    GetMethod get = httpGet("/interpreter/setting");
+
+    // then
+    Map<String, Object> resp = gson.fromJson(get.getResponseBodyAsString(), new TypeToken<Map<String, Object>>(){}.getType());
+    assertThat(get, isAllowed());
+    get.releaseConnection();
+  }
+
+
+  @Test
+  public void testInterpreterAutoBinding() throws IOException {
+    // create note
+    Note note = ZeppelinServer.notebook.createNote();
+
+    // check that an interpreter is bound
+    GetMethod get = httpGet("/notebook/interpreter/bind/"+note.id());
+    assertThat(get, isAllowed());
+    Map<String, Object> resp = gson.fromJson(get.getResponseBodyAsString(), new TypeToken<Map<String, Object>>(){}.getType());
+    List<Map<String, String>> body = (List<Map<String, String>>) resp.get("body");
+    assertTrue(0 < body.size());
+
+    get.releaseConnection();
+  }
+}
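
The TODO above asks for POST, PUT, and DELETE coverage. A hypothetical sketch of a POST test built on the httpPost helper from AbstractTestRestApi follows; the class name, the /notebook path, and the empty JSON payload are assumptions and would need to match the routes actually exposed by ZeppelinServer.

package org.apache.zeppelin.rest;

import static org.junit.Assert.assertThat;

import org.apache.commons.httpclient.methods.PostMethod;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class ZeppelinRestApiPostTest extends AbstractTestRestApi {

  @BeforeClass
  public static void init() throws Exception {
    AbstractTestRestApi.startUp();
  }

  @AfterClass
  public static void destroy() {
    AbstractTestRestApi.shutDown();
  }

  @Test
  public void postReturnsValidJson() throws Exception {
    // the endpoint and body are placeholders for whatever route is under test
    PostMethod post = httpPost("/notebook", "{}");
    assertThat(post.getResponseBodyAsString(), isValidJSON());
    post.releaseConnection();
  }
}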

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/resources/log4j.properties b/zeppelin-server/src/test/resources/log4j.properties
index 14cea37..376ce00 100644
--- a/zeppelin-server/src/test/resources/log4j.properties
+++ b/zeppelin-server/src/test/resources/log4j.properties
@@ -1,3 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 # Direct log messages to stdout
 log4j.appender.stdout=org.apache.log4j.ConsoleAppender
 log4j.appender.stdout.Target=System.out
@@ -16,7 +33,7 @@ log4j.logger.org.apache.hadoop.mapred=WARN
 log4j.logger.org.apache.hadoop.hive.ql=WARN
 log4j.logger.org.apache.hadoop.hive.metastore=WARN
 log4j.logger.org.apache.haadoop.hive.service.HiveServer=WARN
-log4j.logger.com.nflabs.zeppelin.scheduler=WARN
+log4j.logger.org.apache.zeppelin.scheduler=WARN
 
 log4j.logger.org.quartz=WARN
 log4j.logger.DataNucleus=WARN

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/scala/com/nflabs/zeppelin/AbstractFunctionalSuite.scala
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/scala/com/nflabs/zeppelin/AbstractFunctionalSuite.scala b/zeppelin-server/src/test/scala/com/nflabs/zeppelin/AbstractFunctionalSuite.scala
deleted file mode 100644
index ff0f1cf..0000000
--- a/zeppelin-server/src/test/scala/com/nflabs/zeppelin/AbstractFunctionalSuite.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-package com.nflabs.zeppelin
-
-import com.nflabs.zeppelin.AbstractFunctionalSuite.SERVER_ADDRESS
-import org.openqa.selenium.WebDriver
-import org.openqa.selenium.chrome.ChromeDriver
-import org.openqa.selenium.firefox.{FirefoxBinary, FirefoxDriver, FirefoxProfile}
-import org.openqa.selenium.safari.SafariDriver
-import org.scalatest.concurrent.Eventually._
-import org.scalatest.time._
-import org.scalatest.selenium.WebBrowser
-import org.scalatest.{BeforeAndAfterAll, FunSuite, Suite}
-
-import scala.sys.process._
-import scala.util.Try
-
-object AbstractFunctionalSuite {
-  val SERVER_ADDRESS = "http://localhost:8080"
-}
-
-class AbstractFunctionalSuite extends FunSuite with WebBrowser with BeforeAndAfterAll {
-
-  implicit val webDriver = getDriver()
-
-  override def beforeAll() = {
-    "../bin/zeppelin-daemon.sh start" !
-
-    eventually (timeout(Span(20, Seconds))) {
-      go to SERVER_ADDRESS
-      assert(find("welcome").isDefined)
-    }
-  }
-
-  override def nestedSuites =
-    List[Suite](new WelcomePageSuite).toIndexedSeq
-
-  override def afterAll() = {
-    "../bin/zeppelin-daemon.sh stop" !
-  }
-
-  def getDriver(): WebDriver = {
-    val possibleDrivers = List[() => WebDriver](safary, chrome, firefox)
-    val createdDriver = possibleDrivers.map(driverFactory => Try(driverFactory.apply())).find(_.isSuccess)
-    createdDriver match {
-      case Some(driver) => driver.get
-      case None => throw new RuntimeException("Could not initialize any driver")
-    }
-  }
-
-  def safary(): WebDriver = {
-    new SafariDriver()
-  }
-
-  def chrome(): WebDriver = {
-    new ChromeDriver()
-  }
-
-  def firefox(): WebDriver = {
-    val ffox: FirefoxBinary = new FirefoxBinary
-    if ("true" == System.getenv("TRAVIS")) {
-      ffox.setEnvironmentProperty("DISPLAY", ":99")
-    }
-    val profile: FirefoxProfile = new FirefoxProfile
-    new FirefoxDriver(ffox, profile)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/scala/com/nflabs/zeppelin/WelcomePageSuite.scala
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/scala/com/nflabs/zeppelin/WelcomePageSuite.scala b/zeppelin-server/src/test/scala/com/nflabs/zeppelin/WelcomePageSuite.scala
deleted file mode 100644
index c02df2b..0000000
--- a/zeppelin-server/src/test/scala/com/nflabs/zeppelin/WelcomePageSuite.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.nflabs.zeppelin
-
-import org.openqa.selenium.WebDriver
-import org.scalatest.concurrent.Eventually._
-import org.scalatest.time._
-import org.scalatest.selenium.WebBrowser
-import org.scalatest.{DoNotDiscover, FunSuite}
-import AbstractFunctionalSuite.SERVER_ADDRESS
-
-@DoNotDiscover
-class WelcomePageSuite(implicit driver: WebDriver) extends FunSuite with WebBrowser {
-
-  test("Welcome sign is correct") {
-    eventually (timeout(Span(20, Seconds))) {
-      go to SERVER_ADDRESS
-      assert(find("welcome").isDefined)
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/scala/org/apache/zeppelin/AbstractFunctionalSuite.scala
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/scala/org/apache/zeppelin/AbstractFunctionalSuite.scala b/zeppelin-server/src/test/scala/org/apache/zeppelin/AbstractFunctionalSuite.scala
new file mode 100644
index 0000000..a83ab5b
--- /dev/null
+++ b/zeppelin-server/src/test/scala/org/apache/zeppelin/AbstractFunctionalSuite.scala
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin
+
+import org.apache.zeppelin.AbstractFunctionalSuite.SERVER_ADDRESS
+import org.openqa.selenium.WebDriver
+import org.openqa.selenium.chrome.ChromeDriver
+import org.openqa.selenium.firefox.{FirefoxBinary, FirefoxDriver, FirefoxProfile}
+import org.openqa.selenium.safari.SafariDriver
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.time._
+import org.scalatest.selenium.WebBrowser
+import org.scalatest.{BeforeAndAfterAll, FunSuite, Suite}
+
+import scala.sys.process._
+import scala.util.Try
+
+object AbstractFunctionalSuite {
+  val SERVER_ADDRESS = "http://localhost:8080"
+}
+
+class AbstractFunctionalSuite extends FunSuite with WebBrowser with BeforeAndAfterAll {
+
+  implicit val webDriver = getDriver()
+
+  override def beforeAll() = {
+    "../bin/zeppelin-daemon.sh start" !
+
+    eventually (timeout(Span(20, Seconds))) {
+      go to SERVER_ADDRESS
+      assert(find("welcome").isDefined)
+    }
+  }
+
+  override def nestedSuites =
+    List[Suite](new WelcomePageSuite).toIndexedSeq
+
+  override def afterAll() = {
+    "../bin/zeppelin-daemon.sh stop" !
+  }
+
+  def getDriver(): WebDriver = {
+    val possibleDrivers = List[() => WebDriver](safary, chrome, firefox)
+    val createdDriver = possibleDrivers.map(driverFactory => Try(driverFactory.apply())).find(_.isSuccess)
+    createdDriver match {
+      case Some(driver) => driver.get
+      case None => throw new RuntimeException("Could not initialize any driver")
+    }
+  }
+
+  def safary(): WebDriver = {
+    new SafariDriver()
+  }
+
+  def chrome(): WebDriver = {
+    new ChromeDriver()
+  }
+
+  def firefox(): WebDriver = {
+    val ffox: FirefoxBinary = new FirefoxBinary
+    if ("true" == System.getenv("TRAVIS")) {
+      ffox.setEnvironmentProperty("DISPLAY", ":99")
+    }
+    val profile: FirefoxProfile = new FirefoxProfile
+    new FirefoxDriver(ffox, profile)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/test/scala/org/apache/zeppelin/WelcomePageSuite.scala
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/test/scala/org/apache/zeppelin/WelcomePageSuite.scala b/zeppelin-server/src/test/scala/org/apache/zeppelin/WelcomePageSuite.scala
new file mode 100644
index 0000000..3ce534a
--- /dev/null
+++ b/zeppelin-server/src/test/scala/org/apache/zeppelin/WelcomePageSuite.scala
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin
+
+import org.openqa.selenium.WebDriver
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.time._
+import org.scalatest.selenium.WebBrowser
+import org.scalatest.{DoNotDiscover, FunSuite}
+import AbstractFunctionalSuite.SERVER_ADDRESS
+
+@DoNotDiscover
+class WelcomePageSuite(implicit driver: WebDriver) extends FunSuite with WebBrowser {
+
+  test("Welcome sign is correct") {
+    eventually (timeout(Span(20, Seconds))) {
+      go to SERVER_ADDRESS
+      assert(find("welcome").isDefined)
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/Gruntfile.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/Gruntfile.js b/zeppelin-web/Gruntfile.js
index 77900b1..d544ba4 100644
--- a/zeppelin-web/Gruntfile.js
+++ b/zeppelin-web/Gruntfile.js
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 // Generated on 2014-08-29 using generator-angular 0.9.5
 'use strict';
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/404.html
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/404.html b/zeppelin-web/app/404.html
index ec98e3c..45cc829 100644
--- a/zeppelin-web/app/404.html
+++ b/zeppelin-web/app/404.html
@@ -1,3 +1,17 @@
+<!--
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
 <!DOCTYPE html>
 <html lang="en">
   <head>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/WEB-INF/web.xml b/zeppelin-web/app/WEB-INF/web.xml
index 9a04fab..f34da18 100644
--- a/zeppelin-web/app/WEB-INF/web.xml
+++ b/zeppelin-web/app/WEB-INF/web.xml
@@ -1,4 +1,21 @@
 <?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 	xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
 	version="2.5">
@@ -9,7 +26,7 @@
 		<servlet-class>com.sun.jersey.spi.container.servlet.ServletContainer</servlet-class>
 		<init-param>
 			<param-name>com.sun.jersey.config.property.packages</param-name>
-			<param-value>com.nflabs.zeppelin.rest;com.wordnik.swagger.jersey.listing</param-value>
+			<param-value>org.apache.zeppelin.rest;com.wordnik.swagger.jersey.listing</param-value>
 		</init-param>
 		<load-on-startup>1</load-on-startup>
 	</servlet>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/fonts/custom-font.svg
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/fonts/custom-font.svg b/zeppelin-web/app/fonts/custom-font.svg
index fb2769a..55756b1 100644
--- a/zeppelin-web/app/fonts/custom-font.svg
+++ b/zeppelin-web/app/fonts/custom-font.svg
@@ -1,4 +1,20 @@
 <?xml version="1.0" standalone="no"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
 <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
 <svg xmlns="http://www.w3.org/2000/svg">
 <metadata>
@@ -22,4 +38,4 @@
 <missing-glyph horiz-adv-x="2048" />
 <glyph unicode="&#x20;" d="" horiz-adv-x="1024" />
 <glyph unicode="&#xe800;" d="M2340.572 36.572v-146.286h-2340.572v1755.428h146.286v-1609.142h2194.286zM681.142 443.428q0-60.572-42.858-103.428t-103.428-42.858-103.428 42.858-42.858 103.428 42.858 103.428 103.428 42.858 103.428-42.858 42.858-103.428zM921.142 889.142q0-60.572-42.858-103.428t-103.428-42.858-103.428 42.858-42.858 103.428 42.858 103.428 103.428 42.858 103.428-42.858 42.858-103.428zM1298.286 601.142q0-60.572-42.858-103.428t-103.428-42.858-103.428 42.858-42.858 103.428 42.858 103.428 103.428 42.858 103.428-42.858 42.858-103.428zM1488 1061.714q0-60.572-42.858-103.428t-103.428-42.858-103.428 42.858-42.858 103.428 42.858 103.428 103.428 42.858 103.428-42.858 42.858-103.428zM1157.714 1307.428q0-60.572-42.858-103.428t-103.428-42.858-103.428 42.858-42.858 103.428 42.858 103.428 103.428 42.858 103.428-42.858 42.858-103.428zM1755.428 768q0-60.572-42.858-103.428t-103.428-42.858-103.428 42.858-42.858 103.428 42.858 103.428 103.428 42.858 103.428-42.858 42.858-103.428zM1926.858 1392q0
 -60.572-42.858-103.428t-103.428-42.858-103.428 42.858-42.858 103.428 42.858 103.428 103.428 42.858 103.428-42.858 42.858-103.428z" horiz-adv-x="2340" />
-</font></defs></svg>
\ No newline at end of file
+</font></defs></svg>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/index.html
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/index.html b/zeppelin-web/app/index.html
index dd6e801..2ad0e97 100644
--- a/zeppelin-web/app/index.html
+++ b/zeppelin-web/app/index.html
@@ -1,6 +1,4 @@
 <!--
-Copyright 2014 NFLabs
-
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/ace/textarea/src/ace-bookmarklet.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/ace/textarea/src/ace-bookmarklet.js b/zeppelin-web/app/scripts/ace/textarea/src/ace-bookmarklet.js
index 9770344..f723f43 100644
--- a/zeppelin-web/app/scripts/ace/textarea/src/ace-bookmarklet.js
+++ b/zeppelin-web/app/scripts/ace/textarea/src/ace-bookmarklet.js
@@ -1 +1,14 @@
-alert("moved to https://ajaxorg.github.io/ace-builds/demo/bookmarklet/index.html. Please update your bookmark")
\ No newline at end of file
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+alert("moved to https://ajaxorg.github.io/ace-builds/demo/bookmarklet/index.html. Please update your bookmark")

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/app.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/app.js b/zeppelin-web/app/scripts/app.js
index 55dc9b4..ff55647 100644
--- a/zeppelin-web/app/scripts/app.js
+++ b/zeppelin-web/app/scripts/app.js
@@ -1,10 +1,12 @@
-/* Copyright 2014 NFlabs
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
  *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ *    http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,7 +14,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 'use strict';
 
 /** get the current port of the websocket

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/controllers/interpreter.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/controllers/interpreter.js b/zeppelin-web/app/scripts/controllers/interpreter.js
index 083f3e9..0da1baa 100644
--- a/zeppelin-web/app/scripts/controllers/interpreter.js
+++ b/zeppelin-web/app/scripts/controllers/interpreter.js
@@ -1,7 +1,6 @@
 /* global confirm:false, alert:false */
 /* jshint loopfunc: true */
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/controllers/main.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/controllers/main.js b/zeppelin-web/app/scripts/controllers/main.js
index 477f544..535cf78 100644
--- a/zeppelin-web/app/scripts/controllers/main.js
+++ b/zeppelin-web/app/scripts/controllers/main.js
@@ -1,5 +1,4 @@
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/controllers/nav.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/controllers/nav.js b/zeppelin-web/app/scripts/controllers/nav.js
index 5daf2e8..3925845 100644
--- a/zeppelin-web/app/scripts/controllers/nav.js
+++ b/zeppelin-web/app/scripts/controllers/nav.js
@@ -1,6 +1,5 @@
 /* global $:false */
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/controllers/notebook.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/controllers/notebook.js b/zeppelin-web/app/scripts/controllers/notebook.js
index 3d3fead..cc295dc 100644
--- a/zeppelin-web/app/scripts/controllers/notebook.js
+++ b/zeppelin-web/app/scripts/controllers/notebook.js
@@ -1,7 +1,6 @@
 /* global confirm:false, alert:false */
 /* jshint loopfunc: true */
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/controllers/paragraph.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/controllers/paragraph.js b/zeppelin-web/app/scripts/controllers/paragraph.js
index a1cab47..aac8a78 100644
--- a/zeppelin-web/app/scripts/controllers/paragraph.js
+++ b/zeppelin-web/app/scripts/controllers/paragraph.js
@@ -1,7 +1,6 @@
 /* global $:false, jQuery:false, ace:false, confirm:false, d3:false, nv:false*/
 /*jshint loopfunc: true, unused:false */
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/directives/dropdowninput.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/directives/dropdowninput.js b/zeppelin-web/app/scripts/directives/dropdowninput.js
index 005e4b6..65dd5d3 100644
--- a/zeppelin-web/app/scripts/directives/dropdowninput.js
+++ b/zeppelin-web/app/scripts/directives/dropdowninput.js
@@ -1,3 +1,16 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 'use strict';
 
 angular.module('zeppelinWebApp').directive('dropdownInput', function () {

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/directives/ngdelete.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/directives/ngdelete.js b/zeppelin-web/app/scripts/directives/ngdelete.js
index c6967b8..9338952 100644
--- a/zeppelin-web/app/scripts/directives/ngdelete.js
+++ b/zeppelin-web/app/scripts/directives/ngdelete.js
@@ -1,3 +1,17 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 'use strict';
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/directives/ngenter.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/directives/ngenter.js b/zeppelin-web/app/scripts/directives/ngenter.js
index ed97b24..6fc4b73 100644
--- a/zeppelin-web/app/scripts/directives/ngenter.js
+++ b/zeppelin-web/app/scripts/directives/ngenter.js
@@ -1,5 +1,4 @@
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/directives/popover-html-unsafe.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/directives/popover-html-unsafe.js b/zeppelin-web/app/scripts/directives/popover-html-unsafe.js
index 95eeb20..8a84daa 100644
--- a/zeppelin-web/app/scripts/directives/popover-html-unsafe.js
+++ b/zeppelin-web/app/scripts/directives/popover-html-unsafe.js
@@ -1,3 +1,16 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 'use strict';
 
 angular.module('zeppelinWebApp')

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/scripts/directives/resizable.js
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/scripts/directives/resizable.js b/zeppelin-web/app/scripts/directives/resizable.js
index dc92308..fe46a24 100644
--- a/zeppelin-web/app/scripts/directives/resizable.js
+++ b/zeppelin-web/app/scripts/directives/resizable.js
@@ -1,3 +1,16 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 'use strict';
 
 angular.module('zeppelinWebApp').directive('resizable', function () {

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/styles/custom-font.css
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/styles/custom-font.css b/zeppelin-web/app/styles/custom-font.css
index 040ba23..91d3f01 100644
--- a/zeppelin-web/app/styles/custom-font.css
+++ b/zeppelin-web/app/styles/custom-font.css
@@ -1,3 +1,17 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 @font-face {
   font-family: 'CustomFont';
   src: url('../fonts/custom-font.eot') format('embedded-opentype'), url('../fonts/custom-font.woff') format('woff'), url('../fonts/custom-font.ttf') format('truetype'), url('../fonts/custom-font.svg') format('svg');

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/styles/interpreter.css
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/styles/interpreter.css b/zeppelin-web/app/styles/interpreter.css
index 41704c7..1dcc52b 100644
--- a/zeppelin-web/app/styles/interpreter.css
+++ b/zeppelin-web/app/styles/interpreter.css
@@ -1,3 +1,17 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 .interpreterHead {
   margin-left: -10px;
   margin-right: -10px;

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/styles/looknfeel/default.css
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/styles/looknfeel/default.css b/zeppelin-web/app/styles/looknfeel/default.css
index 5a36f9d..8aeee95 100644
--- a/zeppelin-web/app/styles/looknfeel/default.css
+++ b/zeppelin-web/app/styles/looknfeel/default.css
@@ -1,5 +1,4 @@
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/styles/looknfeel/report.css
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/styles/looknfeel/report.css b/zeppelin-web/app/styles/looknfeel/report.css
index 6a1d8d5..84f86d0 100644
--- a/zeppelin-web/app/styles/looknfeel/report.css
+++ b/zeppelin-web/app/styles/looknfeel/report.css
@@ -1,5 +1,4 @@
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/styles/looknfeel/simple.css
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/styles/looknfeel/simple.css b/zeppelin-web/app/styles/looknfeel/simple.css
index 678e0cf..9edb95e 100644
--- a/zeppelin-web/app/styles/looknfeel/simple.css
+++ b/zeppelin-web/app/styles/looknfeel/simple.css
@@ -1,5 +1,4 @@
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/styles/main.css b/zeppelin-web/app/styles/main.css
index b060188..f9cbed8 100644
--- a/zeppelin-web/app/styles/main.css
+++ b/zeppelin-web/app/styles/main.css
@@ -1,5 +1,4 @@
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/styles/notebook.css
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/styles/notebook.css b/zeppelin-web/app/styles/notebook.css
index 344197a..e605534 100644
--- a/zeppelin-web/app/styles/notebook.css
+++ b/zeppelin-web/app/styles/notebook.css
@@ -1,5 +1,4 @@
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/styles/printMode.css
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/styles/printMode.css b/zeppelin-web/app/styles/printMode.css
index 92b906b..953b3a6 100644
--- a/zeppelin-web/app/styles/printMode.css
+++ b/zeppelin-web/app/styles/printMode.css
@@ -1,5 +1,4 @@
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/styles/typography.css
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/styles/typography.css b/zeppelin-web/app/styles/typography.css
index 2ac77dd..5050f19 100644
--- a/zeppelin-web/app/styles/typography.css
+++ b/zeppelin-web/app/styles/typography.css
@@ -1,5 +1,4 @@
-/* Copyright 2014 NFLabs
- *
+/*
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/views/interpreter.html
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/views/interpreter.html b/zeppelin-web/app/views/interpreter.html
index dbc3a82..2bd3fb3 100644
--- a/zeppelin-web/app/views/interpreter.html
+++ b/zeppelin-web/app/views/interpreter.html
@@ -1,6 +1,4 @@
 <!--
-Copyright 2014 NFLabs
-
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/views/main.html
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/views/main.html b/zeppelin-web/app/views/main.html
index 4981a01..c6e8755 100644
--- a/zeppelin-web/app/views/main.html
+++ b/zeppelin-web/app/views/main.html
@@ -1,6 +1,4 @@
 <!--
-Copyright 2014 NFLabs
-
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/views/modal-shortcut.html
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/views/modal-shortcut.html b/zeppelin-web/app/views/modal-shortcut.html
index 5d2da83..1fdf9ea 100644
--- a/zeppelin-web/app/views/modal-shortcut.html
+++ b/zeppelin-web/app/views/modal-shortcut.html
@@ -1,6 +1,4 @@
 <!--
-Copyright 2014 NFLabs
-
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/views/notebooks.html
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/views/notebooks.html b/zeppelin-web/app/views/notebooks.html
index 49cecdd..f3294a1 100644
--- a/zeppelin-web/app/views/notebooks.html
+++ b/zeppelin-web/app/views/notebooks.html
@@ -1,6 +1,4 @@
 <!--
-Copyright 2014 NFLabs
-
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/views/paragraph.html
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/views/paragraph.html b/zeppelin-web/app/views/paragraph.html
index 96bf759..c77c85a 100644
--- a/zeppelin-web/app/views/paragraph.html
+++ b/zeppelin-web/app/views/paragraph.html
@@ -1,6 +1,4 @@
 <!--
-Copyright 2014 NFLabs
-
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at


[09/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/ClientFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/ClientFactory.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/ClientFactory.java
new file mode 100644
index 0000000..6126f75
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/ClientFactory.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.pool2.BasePooledObjectFactory;
+import org.apache.commons.pool2.PooledObject;
+import org.apache.commons.pool2.impl.DefaultPooledObject;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransportException;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
+
+/**
+ * Thrift client factory for the remote interpreter process, pooled via commons-pool2.
+ * Keeps track of the TSocket behind each client so the socket can be closed when the
+ * pooled object is destroyed.
+ */
+public class ClientFactory extends BasePooledObjectFactory<Client>{
+  private String host;
+  private int port;
+  Map<Client, TSocket> clientSocketMap = new HashMap<Client, TSocket>();
+
+  public ClientFactory(String host, int port) {
+    this.host = host;
+    this.port = port;
+  }
+
+  @Override
+  public Client create() throws Exception {
+    TSocket transport = new TSocket(host, port);
+    try {
+      transport.open();
+    } catch (TTransportException e) {
+      throw new InterpreterException(e);
+    }
+
+    TProtocol protocol = new  TBinaryProtocol(transport);
+    Client client = new RemoteInterpreterService.Client(protocol);
+
+    synchronized (clientSocketMap) {
+      clientSocketMap.put(client, transport);
+    }
+    return client;
+  }
+
+  @Override
+  public PooledObject<Client> wrap(Client client) {
+    return new DefaultPooledObject<Client>(client);
+  }
+
+  @Override
+  public void destroyObject(PooledObject<Client> p) {
+    synchronized (clientSocketMap) {
+      if (clientSocketMap.containsKey(p.getObject())) {
+        clientSocketMap.get(p.getObject()).close();
+        clientSocketMap.remove(p.getObject());
+      }
+    }
+  }
+}
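
A minimal usage sketch (not part of this commit) of how ClientFactory plugs into a commons-pool2 GenericObjectPool, mirroring what RemoteInterpreterProcess does further down; the host and port values are placeholders:

  import org.apache.commons.pool2.impl.GenericObjectPool;
  import org.apache.zeppelin.interpreter.remote.ClientFactory;
  import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;

  public class ClientPoolSketch {
    public static void main(String[] args) throws Exception {
      GenericObjectPool<Client> pool =
          new GenericObjectPool<Client>(new ClientFactory("localhost", 12320));
      Client client = pool.borrowObject();   // ClientFactory.create() opens the TSocket
      try {
        // issue Thrift calls on client here
      } finally {
        pool.returnObject(client);           // hand the connection back to the pool
      }
      pool.close();
    }
  }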

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java
new file mode 100644
index 0000000..e905d5f
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.thrift.TException;
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Type;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterContext;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterResult;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+
+/**
+ * Proxy for an interpreter that runs in a separate remote interpreter process,
+ * forwarding each call over a pooled Thrift client.
+ */
+public class RemoteInterpreter extends Interpreter {
+  Logger logger = LoggerFactory.getLogger(RemoteInterpreter.class);
+  Gson gson = new Gson();
+  private String interpreterRunner;
+  private String interpreterPath;
+  private String className;
+  FormType formType;
+  boolean initialized;
+  private Map<String, String> env;
+  static Map<String, RemoteInterpreterProcess> interpreterGroupReference
+    = new HashMap<String, RemoteInterpreterProcess>();
+
+  public RemoteInterpreter(Properties property,
+      String className,
+      String interpreterRunner,
+      String interpreterPath) {
+    super(property);
+
+    this.className = className;
+    initialized = false;
+    this.interpreterRunner = interpreterRunner;
+    this.interpreterPath = interpreterPath;
+    env = new HashMap<String, String>();
+  }
+
+  public RemoteInterpreter(Properties property,
+      String className,
+      String interpreterRunner,
+      String interpreterPath,
+      Map<String, String> env) {
+    super(property);
+
+    this.className = className;
+    this.interpreterRunner = interpreterRunner;
+    this.interpreterPath = interpreterPath;
+    this.env = env;
+  }
+
+  @Override
+  public String getClassName() {
+    return className;
+  }
+
+  public RemoteInterpreterProcess getInterpreterProcess() {
+    synchronized (interpreterGroupReference) {
+      String key = getInterpreterGroupKey(getInterpreterGroup());
+      if (interpreterGroupReference.containsKey(key)) {
+        return interpreterGroupReference.get(key);
+      } else {
+        throw new InterpreterException("Unexpected error");
+      }
+    }
+  }
+
+  private synchronized void init() {
+    if (initialized) {
+      return;
+    }
+
+    RemoteInterpreterProcess interpreterProcess = null;
+
+    synchronized (interpreterGroupReference) {
+      if (interpreterGroupReference.containsKey(getInterpreterGroupKey(getInterpreterGroup()))) {
+        interpreterProcess = interpreterGroupReference
+            .get(getInterpreterGroupKey(getInterpreterGroup()));
+      } else {
+        throw new InterpreterException("Unexpected error");
+      }
+    }
+
+    int rc = interpreterProcess.reference();
+
+    synchronized (interpreterProcess) {
+      // when first process created
+      if (rc == 1) {
+        // create all interpreter class in this interpreter group
+        Client client = null;
+        try {
+          client = interpreterProcess.getClient();
+        } catch (Exception e1) {
+          throw new InterpreterException(e1);
+        }
+
+        try {
+          for (Interpreter intp : this.getInterpreterGroup()) {
+            logger.info("Create remote interpreter {}", intp.getClassName());
+            client.createInterpreter(intp.getClassName(), (Map) property);
+
+          }
+        } catch (TException e) {
+          throw new InterpreterException(e);
+        } finally {
+          interpreterProcess.releaseClient(client);
+        }
+      }
+    }
+    initialized = true;
+  }
+
+
+
+  @Override
+  public void open() {
+    init();
+  }
+
+  @Override
+  public void close() {
+    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
+    Client client = null;
+    try {
+      client = interpreterProcess.getClient();
+    } catch (Exception e1) {
+      throw new InterpreterException(e1);
+    }
+
+    try {
+      client.close(className);
+    } catch (TException e) {
+      throw new InterpreterException(e);
+    } finally {
+      interpreterProcess.releaseClient(client);
+    }
+
+    interpreterProcess.dereference();
+  }
+
+  @Override
+  public InterpreterResult interpret(String st, InterpreterContext context) {
+    FormType form = getFormType();
+    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
+    Client client = null;
+    try {
+      client = interpreterProcess.getClient();
+    } catch (Exception e1) {
+      throw new InterpreterException(e1);
+    }
+
+    try {
+      GUI settings = context.getGui();
+      RemoteInterpreterResult remoteResult = client.interpret(className, st, convert(context));
+
+      Map<String, Object> remoteConfig = (Map<String, Object>) gson.fromJson(
+          remoteResult.getConfig(), new TypeToken<Map<String, Object>>() {
+          }.getType());
+      context.getConfig().clear();
+      context.getConfig().putAll(remoteConfig);
+
+      if (form == FormType.NATIVE) {
+        GUI remoteGui = gson.fromJson(remoteResult.getGui(), GUI.class);
+        context.getGui().clear();
+        context.getGui().setParams(remoteGui.getParams());
+        context.getGui().setForms(remoteGui.getForms());
+      }
+
+      return convert(remoteResult);
+    } catch (TException e) {
+      throw new InterpreterException(e);
+    } finally {
+      interpreterProcess.releaseClient(client);
+    }
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {
+    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
+    Client client = null;
+    try {
+      client = interpreterProcess.getClient();
+    } catch (Exception e1) {
+      throw new InterpreterException(e1);
+    }
+
+    try {
+      client.cancel(className, convert(context));
+    } catch (TException e) {
+      throw new InterpreterException(e);
+    } finally {
+      interpreterProcess.releaseClient(client);
+    }
+  }
+
+
+  @Override
+  public FormType getFormType() {
+    init();
+
+    if (formType != null) {
+      return formType;
+    }
+
+    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
+    Client client = null;
+    try {
+      client = interpreterProcess.getClient();
+    } catch (Exception e1) {
+      throw new InterpreterException(e1);
+    }
+
+    try {
+      formType = FormType.valueOf(client.getFormType(className));
+      return formType;
+    } catch (TException e) {
+      throw new InterpreterException(e);
+    } finally {
+      interpreterProcess.releaseClient(client);
+    }
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
+    Client client = null;
+    try {
+      client = interpreterProcess.getClient();
+    } catch (Exception e1) {
+      throw new InterpreterException(e1);
+    }
+
+    try {
+      return client.getProgress(className, convert(context));
+    } catch (TException e) {
+      throw new InterpreterException(e);
+    } finally {
+      interpreterProcess.releaseClient(client);
+    }
+  }
+
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
+    Client client = null;
+    try {
+      client = interpreterProcess.getClient();
+    } catch (Exception e1) {
+      throw new InterpreterException(e1);
+    }
+
+    try {
+      return client.completion(className, buf, cursor);
+    } catch (TException e) {
+      throw new InterpreterException(e);
+    } finally {
+      interpreterProcess.releaseClient(client);
+    }
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    int maxConcurrency = 10;
+    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
+    return SchedulerFactory.singleton().createOrGetRemoteScheduler(
+        "remoteinterpreter_" + interpreterProcess.hashCode(),
+        getInterpreterProcess(),
+        maxConcurrency);
+  }
+
+  @Override
+  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
+    super.setInterpreterGroup(interpreterGroup);
+
+    synchronized (interpreterGroupReference) {
+      if (!interpreterGroupReference
+          .containsKey(getInterpreterGroupKey(interpreterGroup))) {
+        interpreterGroupReference.put(getInterpreterGroupKey(interpreterGroup),
+            new RemoteInterpreterProcess(interpreterRunner,
+                interpreterPath, env));
+
+        logger.info("setInterpreterGroup = "
+            + getInterpreterGroupKey(interpreterGroup) + " class=" + className
+            + ", path=" + interpreterPath);
+      }
+    }
+  }
+
+  private String getInterpreterGroupKey(InterpreterGroup interpreterGroup) {
+    return interpreterGroup.getId();
+  }
+
+  private RemoteInterpreterContext convert(InterpreterContext ic) {
+    return new RemoteInterpreterContext(
+        ic.getParagraphId(),
+        ic.getParagraphTitle(),
+        ic.getParagraphText(),
+        gson.toJson(ic.getConfig()),
+        gson.toJson(ic.getGui()));
+  }
+
+  private InterpreterResult convert(RemoteInterpreterResult result) {
+    return new InterpreterResult(
+        InterpreterResult.Code.valueOf(result.getCode()),
+        Type.valueOf(result.getType()),
+        result.getMsg());
+  }
+}
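
Every RPC method above repeats the same borrow/call/release pattern against the pooled Thrift client. The following sketch (not part of this commit) shows how that pattern could be factored out; the ThriftCall interface and withClient helper are illustrative names, not APIs from the diff:

  import org.apache.thrift.TException;
  import org.apache.zeppelin.interpreter.InterpreterException;
  import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
  import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;

  interface ThriftCall<T> {
    T call(Client client) throws TException;
  }

  class ThriftCallTemplate {
    static <T> T withClient(RemoteInterpreterProcess process, ThriftCall<T> body) {
      Client client;
      try {
        client = process.getClient();          // borrow a pooled client
      } catch (Exception e) {
        throw new InterpreterException(e);
      }
      try {
        return body.call(client);              // run the actual Thrift call
      } catch (TException e) {
        throw new InterpreterException(e);
      } finally {
        process.releaseClient(client);         // always return it to the pool
      }
    }
  }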

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcess.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcess.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcess.java
new file mode 100644
index 0000000..a128cd7
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcess.java
@@ -0,0 +1,208 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.exec.DefaultExecutor;
+import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.exec.ExecuteResultHandler;
+import org.apache.commons.exec.ExecuteWatchdog;
+import org.apache.commons.exec.environment.EnvironmentUtils;
+import org.apache.commons.pool2.impl.GenericObjectPool;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Manages the lifecycle of a remote interpreter process: reference-counted start/stop
+ * of the interpreter runner script and a pool of Thrift clients bound to it.
+ */
+public class RemoteInterpreterProcess implements ExecuteResultHandler {
+  Logger logger = LoggerFactory.getLogger(RemoteInterpreterProcess.class);
+  AtomicInteger referenceCount;
+  private DefaultExecutor executor;
+  private ExecuteWatchdog watchdog;
+  boolean running = false;
+  int port = -1;
+  private String interpreterRunner;
+  private String interpreterDir;
+
+  private GenericObjectPool<Client> clientPool;
+  private Map<String, String> env;
+
+  public RemoteInterpreterProcess(String intpRunner, String intpDir, Map<String, String> env) {
+    this.interpreterRunner = intpRunner;
+    this.interpreterDir = intpDir;
+    this.env = env;
+    referenceCount = new AtomicInteger(0);
+  }
+
+  public int getPort() {
+    return port;
+  }
+
+  public int reference() {
+    synchronized (referenceCount) {
+      if (executor == null) {
+        // start server process
+        try {
+          port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
+        } catch (IOException e1) {
+          throw new InterpreterException(e1);
+        }
+
+
+        CommandLine cmdLine = CommandLine.parse(interpreterRunner);
+        cmdLine.addArgument("-d", false);
+        cmdLine.addArgument(interpreterDir, false);
+        cmdLine.addArgument("-p", false);
+        cmdLine.addArgument(Integer.toString(port), false);
+
+        executor = new DefaultExecutor();
+
+        watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
+        executor.setWatchdog(watchdog);
+
+        running = true;
+        try {
+          Map procEnv = EnvironmentUtils.getProcEnvironment();
+          procEnv.putAll(env);
+
+          logger.info("Run interpreter process {}", cmdLine);
+          executor.execute(cmdLine, procEnv, this);
+        } catch (IOException e) {
+          running = false;
+          throw new InterpreterException(e);
+        }
+
+
+        long startTime = System.currentTimeMillis();
+        while (System.currentTimeMillis() - startTime < 5 * 1000) {
+          if (RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", port)) {
+            break;
+          } else {
+            try {
+              Thread.sleep(500);
+            } catch (InterruptedException e) {
+            }
+          }
+        }
+
+        clientPool = new GenericObjectPool<Client>(new ClientFactory("localhost", port));
+      }
+      return referenceCount.incrementAndGet();
+    }
+  }
+
+  public Client getClient() throws Exception {
+    return clientPool.borrowObject();
+  }
+
+  public void releaseClient(Client client) {
+    clientPool.returnObject(client);
+  }
+
+  public int dereference() {
+    synchronized (referenceCount) {
+      int r = referenceCount.decrementAndGet();
+      if (r == 0) {
+        logger.info("shutdown interpreter process");
+        // first try shutdown
+        try {
+          Client client = getClient();
+          client.shutdown();
+          releaseClient(client);
+        } catch (Exception e) {
+          logger.error("Error", e);
+          watchdog.destroyProcess();
+        }
+
+        clientPool.clear();
+        clientPool.close();
+
+        // wait up to 3 seconds, then force kill;
+        // the remote process's server.serve() loop does not always finish gracefully
+        long startTime = System.currentTimeMillis();
+        while (System.currentTimeMillis() - startTime < 3 * 1000) {
+          if (this.isRunning()) {
+            try {
+              Thread.sleep(500);
+            } catch (InterruptedException e) {
+            }
+          } else {
+            break;
+          }
+        }
+
+        if (isRunning()) {
+          logger.info("kill interpreter process");
+          watchdog.destroyProcess();
+        }
+
+        executor = null;
+        watchdog = null;
+        running = false;
+        logger.info("Remote process terminated");
+      }
+      return r;
+    }
+  }
+
+  public int referenceCount() {
+    synchronized (referenceCount) {
+      return referenceCount.get();
+    }
+  }
+
+  @Override
+  public void onProcessComplete(int exitValue) {
+    logger.info("Interpreter process exited {}", exitValue);
+    running = false;
+
+  }
+
+  @Override
+  public void onProcessFailed(ExecuteException e) {
+    logger.info("Interpreter process failed {}", e);
+    running = false;
+  }
+
+  public boolean isRunning() {
+    return running;
+  }
+
+  public int getNumActiveClient() {
+    if (clientPool == null) {
+      return 0;
+    } else {
+      return clientPool.getNumActive();
+    }
+  }
+
+  public int getNumIdleClient() {
+    if (clientPool == null) {
+      return 0;
+    } else {
+      return clientPool.getNumIdle();
+    }
+  }
+}
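
Below is a minimal usage sketch of the reference-counted lifecycle implemented above: the first reference() starts the interpreter process and client pool, clients are borrowed and returned around thrift calls, and the last dereference() shuts the process down. The runner path, interpreter directory and environment map are hypothetical placeholders, not values from this commit.

import java.util.HashMap;
import java.util.Map;

import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;

public class RemoteInterpreterProcessUsageSketch {
  public static void main(String[] args) throws Exception {
    Map<String, String> env = new HashMap<String, String>();          // hypothetical environment
    RemoteInterpreterProcess process = new RemoteInterpreterProcess(
        "/path/to/bin/interpreter.sh",                                 // hypothetical runner script
        "/path/to/interpreter/dir",                                    // hypothetical interpreter dir
        env);

    process.reference();                  // first reference spawns the process and the client pool
    Client client = process.getClient();  // borrow a thrift client from the pool
    try {
      // ... call thrift methods such as createInterpreter()/interpret() here ...
    } finally {
      process.releaseClient(client);      // always return the client to the pool
    }
    process.dereference();                // last dereference shuts the remote process down
  }
}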

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java
new file mode 100644
index 0000000..ee4aa2d
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java
@@ -0,0 +1,342 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote;
+
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.net.URL;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.thrift.TException;
+import org.apache.thrift.server.TThreadPoolServer;
+import org.apache.thrift.transport.TServerSocket;
+import org.apache.thrift.transport.TTransportException;
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.ClassloaderInterpreter;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
+import org.apache.zeppelin.interpreter.Interpreter.FormType;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterContext;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterResult;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService;
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.JobListener;
+import org.apache.zeppelin.scheduler.JobProgressPoller;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.Job.Status;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+
+
+/**
+ * Thrift server running inside the remote interpreter process; creates and runs interpreters.
+ */
+public class RemoteInterpreterServer
+  extends Thread
+  implements RemoteInterpreterService.Iface {
+  Logger logger = LoggerFactory.getLogger(RemoteInterpreterServer.class);
+
+  InterpreterGroup interpreterGroup = new InterpreterGroup();
+  Gson gson = new Gson();
+
+  RemoteInterpreterService.Processor<RemoteInterpreterServer> processor;
+  RemoteInterpreterServer handler;
+  private int port;
+  private TThreadPoolServer server;
+
+  public RemoteInterpreterServer(int port) throws TTransportException {
+    this.port = port;
+    processor = new RemoteInterpreterService.Processor<RemoteInterpreterServer>(this);
+    TServerSocket serverTransport = new TServerSocket(port);
+    server = new TThreadPoolServer(
+        new TThreadPoolServer.Args(serverTransport).processor(processor));
+  }
+
+  @Override
+  public void run() {
+    logger.info("Starting remote interpreter server on port {}", port);
+    server.serve();
+  }
+
+  @Override
+  public void shutdown() throws TException {
+    // server.stop() does not always end the server.serve() loop;
+    // sometimes serve() keeps hanging even after stop() is called.
+    // In that case the caller needs to force-kill the process.
+    server.stop();
+  }
+
+  public int getPort() {
+    return port;
+  }
+
+  public boolean isRunning() {
+    if (server == null) {
+      return false;
+    } else {
+      return server.isServing();
+    }
+  }
+
+
+  public static void main(String[] args)
+      throws TTransportException, InterruptedException {
+    int port = Integer.parseInt(args[0]);
+    RemoteInterpreterServer remoteInterpreterServer = new RemoteInterpreterServer(port);
+    remoteInterpreterServer.start();
+    remoteInterpreterServer.join();
+    System.exit(0);
+  }
+
+
+  @Override
+  public void createInterpreter(String className, Map<String, String> properties)
+      throws TException {
+    try {
+      Class<Interpreter> replClass = (Class<Interpreter>) Class.forName(className);
+      Properties p = new Properties();
+      p.putAll(properties);
+
+      Constructor<Interpreter> constructor =
+          replClass.getConstructor(new Class[] {Properties.class});
+      Interpreter repl = constructor.newInstance(p);
+
+      ClassLoader cl = ClassLoader.getSystemClassLoader();
+      repl.setClassloaderUrls(new URL[]{});
+
+      synchronized (interpreterGroup) {
+        interpreterGroup.add(new LazyOpenInterpreter(
+            new ClassloaderInterpreter(repl, cl)));
+      }
+
+      logger.info("Instantiate interpreter {}", className);
+      repl.setInterpreterGroup(interpreterGroup);
+    } catch (ClassNotFoundException | NoSuchMethodException | SecurityException
+        | InstantiationException | IllegalAccessException
+        | IllegalArgumentException | InvocationTargetException e) {
+      logger.error("Failed to instantiate interpreter " + className, e);
+      throw new TException(e);
+    }
+  }
+
+  private Interpreter getInterpreter(String className) throws TException {
+    synchronized (interpreterGroup) {
+      for (Interpreter inp : interpreterGroup) {
+        if (inp.getClassName().equals(className)) {
+          return inp;
+        }
+      }
+    }
+    throw new TException(new InterpreterException("Interpreter instance "
+        + className + " not found"));
+  }
+
+  @Override
+  public void open(String className) throws TException {
+    Interpreter intp = getInterpreter(className);
+    intp.open();
+  }
+
+  @Override
+  public void close(String className) throws TException {
+    Interpreter intp = getInterpreter(className);
+    intp.close();
+  }
+
+
+  @Override
+  public RemoteInterpreterResult interpret(String className, String st,
+      RemoteInterpreterContext interpreterContext) throws TException {
+    Interpreter intp = getInterpreter(className);
+    InterpreterContext context = convert(interpreterContext);
+
+    Scheduler scheduler = intp.getScheduler();
+    InterpretJobListener jobListener = new InterpretJobListener();
+    InterpretJob job = new InterpretJob(
+        interpreterContext.getParagraphId(),
+        "remoteInterpretJob_" + System.currentTimeMillis(),
+        jobListener,
+        JobProgressPoller.DEFAULT_INTERVAL_MSEC,
+        intp,
+        st,
+        context);
+
+    scheduler.submit(job);
+
+    while (!job.isTerminated()) {
+      synchronized (jobListener) {
+        try {
+          jobListener.wait(1000);
+        } catch (InterruptedException e) {
+        }
+      }
+    }
+
+    if (job.getStatus() == Status.ERROR) {
+      throw new TException(job.getException());
+    } else {
+      if (intp.getFormType() == FormType.NATIVE) {
+        // serialize dynamic form
+
+      }
+
+      return convert((InterpreterResult) job.getReturn(),
+          context.getConfig(),
+          context.getGui());
+    }
+  }
+
+  class InterpretJobListener implements JobListener {
+
+    @Override
+    public void onProgressUpdate(Job job, int progress) {
+    }
+
+    @Override
+    public void beforeStatusChange(Job job, Status before, Status after) {
+    }
+
+    @Override
+    public void afterStatusChange(Job job, Status before, Status after) {
+      synchronized (this) {
+        notifyAll();
+      }
+    }
+  }
+
+  class InterpretJob extends Job {
+
+    private Interpreter interpreter;
+    private String script;
+    private InterpreterContext context;
+
+    public InterpretJob(
+        String jobId,
+        String jobName,
+        JobListener listener,
+        long progressUpdateIntervalMsec,
+        Interpreter interpreter,
+        String script,
+        InterpreterContext context) {
+      super(jobId, jobName, listener, progressUpdateIntervalMsec);
+      this.interpreter = interpreter;
+      this.script = script;
+      this.context = context;
+    }
+
+    @Override
+    public int progress() {
+      return 0;
+    }
+
+    @Override
+    public Map<String, Object> info() {
+      return null;
+    }
+
+    @Override
+    protected Object jobRun() throws Throwable {
+      InterpreterResult result = interpreter.interpret(script, context);
+      return result;
+    }
+
+    @Override
+    protected boolean jobAbort() {
+      return false;
+    }
+  }
+
+
+  @Override
+  public void cancel(String className, RemoteInterpreterContext interpreterContext)
+      throws TException {
+    Interpreter intp = getInterpreter(className);
+    intp.cancel(convert(interpreterContext));
+  }
+
+  @Override
+  public int getProgress(String className, RemoteInterpreterContext interpreterContext)
+      throws TException {
+    Interpreter intp = getInterpreter(className);
+    return intp.getProgress(convert(interpreterContext));
+  }
+
+
+  @Override
+  public String getFormType(String className) throws TException {
+    Interpreter intp = getInterpreter(className);
+    return intp.getFormType().toString();
+  }
+
+  @Override
+  public List<String> completion(String className, String buf, int cursor) throws TException {
+    Interpreter intp = getInterpreter(className);
+    return intp.completion(buf, cursor);
+  }
+
+  private InterpreterContext convert(RemoteInterpreterContext ric) {
+    return new InterpreterContext(
+        ric.getParagraphId(),
+        ric.getParagraphTitle(),
+        ric.getParagraphText(),
+        (Map<String, Object>) gson.fromJson(ric.getConfig(),
+            new TypeToken<Map<String, Object>>() {}.getType()),
+        gson.fromJson(ric.getGui(), GUI.class));
+  }
+
+  private RemoteInterpreterResult convert(InterpreterResult result,
+      Map<String, Object> config, GUI gui) {
+    return new RemoteInterpreterResult(
+        result.code().name(),
+        result.type().name(),
+        result.message(),
+        gson.toJson(config),
+        gson.toJson(gui));
+  }
+
+  @Override
+  public String getStatus(String jobId)
+      throws TException {
+    synchronized (interpreterGroup) {
+      for (Interpreter intp : interpreterGroup) {
+        for (Job job : intp.getScheduler().getJobsRunning()) {
+          if (jobId.equals(job.getId())) {
+            return job.getStatus().name();
+          }
+        }
+
+        for (Job job : intp.getScheduler().getJobsWaiting()) {
+          if (jobId.equals(job.getId())) {
+            return job.getStatus().name();
+          }
+        }
+      }
+    }
+    return "Unknown";
+  }
+}
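
A minimal sketch of driving the server above in-process: start() runs the thrift serve() loop on a thread, isRunning() reports whether it is serving, and shutdown() stops it. The port value is a hypothetical placeholder.

import org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer;

public class RemoteInterpreterServerUsageSketch {
  public static void main(String[] args) throws Exception {
    int port = 30303;                                     // hypothetical port
    RemoteInterpreterServer server = new RemoteInterpreterServer(port);
    server.start();                                       // run() calls TThreadPoolServer.serve()

    while (!server.isRunning()) {                         // wait for the serve() loop to come up
      Thread.sleep(100);
    }

    // ... thrift clients can now call createInterpreter()/open()/interpret() ...

    server.shutdown();                                    // stop() the thrift server
    server.join();
  }
}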

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtils.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtils.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtils.java
new file mode 100644
index 0000000..27983ae
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtils.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+
+/**
+ * Network helpers: free port discovery and endpoint reachability check.
+ */
+public class RemoteInterpreterUtils {
+  public static int findRandomAvailablePortOnAllLocalInterfaces() throws IOException {
+    int port;
+    // bind to an ephemeral port; try-with-resources closes the socket automatically
+    try (ServerSocket socket = new ServerSocket(0)) {
+      port = socket.getLocalPort();
+    }
+    return port;
+  }
+
+  public static boolean checkIfRemoteEndpointAccessible(String host, int port) {
+    try {
+      Socket discover = new Socket();
+      discover.setSoTimeout(1000);
+      discover.connect(new InetSocketAddress(host, port), 1000);
+      discover.close();
+      return true;
+    } catch (IOException e) {
+      return false;
+    }
+  }
+}
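
A minimal sketch that combines the two helpers above the same way RemoteInterpreterProcess does: pick a free ephemeral port, then poll until something is listening on it. The 5-second budget mirrors the wait loop in reference().

import org.apache.zeppelin.interpreter.remote.RemoteInterpreterUtils;

public class RemoteInterpreterUtilsUsageSketch {
  public static void main(String[] args) throws Exception {
    int port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
    System.out.println("Start the remote interpreter on port " + port);

    long start = System.currentTimeMillis();
    while (System.currentTimeMillis() - start < 5 * 1000) {
      if (RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", port)) {
        System.out.println("Endpoint is accessible");
        break;
      }
      Thread.sleep(500);
    }
  }
}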

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterContext.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterContext.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterContext.java
new file mode 100644
index 0000000..4284cf1
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterContext.java
@@ -0,0 +1,786 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.zeppelin.interpreter.thrift;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class RemoteInterpreterContext implements org.apache.thrift.TBase<RemoteInterpreterContext, RemoteInterpreterContext._Fields>, java.io.Serializable, Cloneable {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteInterpreterContext");
+
+  private static final org.apache.thrift.protocol.TField PARAGRAPH_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphId", org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField PARAGRAPH_TITLE_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphTitle", org.apache.thrift.protocol.TType.STRING, (short)2);
+  private static final org.apache.thrift.protocol.TField PARAGRAPH_TEXT_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphText", org.apache.thrift.protocol.TType.STRING, (short)3);
+  private static final org.apache.thrift.protocol.TField CONFIG_FIELD_DESC = new org.apache.thrift.protocol.TField("config", org.apache.thrift.protocol.TType.STRING, (short)4);
+  private static final org.apache.thrift.protocol.TField GUI_FIELD_DESC = new org.apache.thrift.protocol.TField("gui", org.apache.thrift.protocol.TType.STRING, (short)5);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new RemoteInterpreterContextStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new RemoteInterpreterContextTupleSchemeFactory());
+  }
+
+  public String paragraphId; // required
+  public String paragraphTitle; // required
+  public String paragraphText; // required
+  public String config; // required
+  public String gui; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    PARAGRAPH_ID((short)1, "paragraphId"),
+    PARAGRAPH_TITLE((short)2, "paragraphTitle"),
+    PARAGRAPH_TEXT((short)3, "paragraphText"),
+    CONFIG((short)4, "config"),
+    GUI((short)5, "gui");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // PARAGRAPH_ID
+          return PARAGRAPH_ID;
+        case 2: // PARAGRAPH_TITLE
+          return PARAGRAPH_TITLE;
+        case 3: // PARAGRAPH_TEXT
+          return PARAGRAPH_TEXT;
+        case 4: // CONFIG
+          return CONFIG;
+        case 5: // GUI
+          return GUI;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.PARAGRAPH_ID, new org.apache.thrift.meta_data.FieldMetaData("paragraphId", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.PARAGRAPH_TITLE, new org.apache.thrift.meta_data.FieldMetaData("paragraphTitle", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.PARAGRAPH_TEXT, new org.apache.thrift.meta_data.FieldMetaData("paragraphText", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.CONFIG, new org.apache.thrift.meta_data.FieldMetaData("config", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.GUI, new org.apache.thrift.meta_data.FieldMetaData("gui", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteInterpreterContext.class, metaDataMap);
+  }
+
+  public RemoteInterpreterContext() {
+  }
+
+  public RemoteInterpreterContext(
+    String paragraphId,
+    String paragraphTitle,
+    String paragraphText,
+    String config,
+    String gui)
+  {
+    this();
+    this.paragraphId = paragraphId;
+    this.paragraphTitle = paragraphTitle;
+    this.paragraphText = paragraphText;
+    this.config = config;
+    this.gui = gui;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public RemoteInterpreterContext(RemoteInterpreterContext other) {
+    if (other.isSetParagraphId()) {
+      this.paragraphId = other.paragraphId;
+    }
+    if (other.isSetParagraphTitle()) {
+      this.paragraphTitle = other.paragraphTitle;
+    }
+    if (other.isSetParagraphText()) {
+      this.paragraphText = other.paragraphText;
+    }
+    if (other.isSetConfig()) {
+      this.config = other.config;
+    }
+    if (other.isSetGui()) {
+      this.gui = other.gui;
+    }
+  }
+
+  public RemoteInterpreterContext deepCopy() {
+    return new RemoteInterpreterContext(this);
+  }
+
+  @Override
+  public void clear() {
+    this.paragraphId = null;
+    this.paragraphTitle = null;
+    this.paragraphText = null;
+    this.config = null;
+    this.gui = null;
+  }
+
+  public String getParagraphId() {
+    return this.paragraphId;
+  }
+
+  public RemoteInterpreterContext setParagraphId(String paragraphId) {
+    this.paragraphId = paragraphId;
+    return this;
+  }
+
+  public void unsetParagraphId() {
+    this.paragraphId = null;
+  }
+
+  /** Returns true if field paragraphId is set (has been assigned a value) and false otherwise */
+  public boolean isSetParagraphId() {
+    return this.paragraphId != null;
+  }
+
+  public void setParagraphIdIsSet(boolean value) {
+    if (!value) {
+      this.paragraphId = null;
+    }
+  }
+
+  public String getParagraphTitle() {
+    return this.paragraphTitle;
+  }
+
+  public RemoteInterpreterContext setParagraphTitle(String paragraphTitle) {
+    this.paragraphTitle = paragraphTitle;
+    return this;
+  }
+
+  public void unsetParagraphTitle() {
+    this.paragraphTitle = null;
+  }
+
+  /** Returns true if field paragraphTitle is set (has been assigned a value) and false otherwise */
+  public boolean isSetParagraphTitle() {
+    return this.paragraphTitle != null;
+  }
+
+  public void setParagraphTitleIsSet(boolean value) {
+    if (!value) {
+      this.paragraphTitle = null;
+    }
+  }
+
+  public String getParagraphText() {
+    return this.paragraphText;
+  }
+
+  public RemoteInterpreterContext setParagraphText(String paragraphText) {
+    this.paragraphText = paragraphText;
+    return this;
+  }
+
+  public void unsetParagraphText() {
+    this.paragraphText = null;
+  }
+
+  /** Returns true if field paragraphText is set (has been assigned a value) and false otherwise */
+  public boolean isSetParagraphText() {
+    return this.paragraphText != null;
+  }
+
+  public void setParagraphTextIsSet(boolean value) {
+    if (!value) {
+      this.paragraphText = null;
+    }
+  }
+
+  public String getConfig() {
+    return this.config;
+  }
+
+  public RemoteInterpreterContext setConfig(String config) {
+    this.config = config;
+    return this;
+  }
+
+  public void unsetConfig() {
+    this.config = null;
+  }
+
+  /** Returns true if field config is set (has been assigned a value) and false otherwise */
+  public boolean isSetConfig() {
+    return this.config != null;
+  }
+
+  public void setConfigIsSet(boolean value) {
+    if (!value) {
+      this.config = null;
+    }
+  }
+
+  public String getGui() {
+    return this.gui;
+  }
+
+  public RemoteInterpreterContext setGui(String gui) {
+    this.gui = gui;
+    return this;
+  }
+
+  public void unsetGui() {
+    this.gui = null;
+  }
+
+  /** Returns true if field gui is set (has been assigned a value) and false otherwise */
+  public boolean isSetGui() {
+    return this.gui != null;
+  }
+
+  public void setGuiIsSet(boolean value) {
+    if (!value) {
+      this.gui = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case PARAGRAPH_ID:
+      if (value == null) {
+        unsetParagraphId();
+      } else {
+        setParagraphId((String)value);
+      }
+      break;
+
+    case PARAGRAPH_TITLE:
+      if (value == null) {
+        unsetParagraphTitle();
+      } else {
+        setParagraphTitle((String)value);
+      }
+      break;
+
+    case PARAGRAPH_TEXT:
+      if (value == null) {
+        unsetParagraphText();
+      } else {
+        setParagraphText((String)value);
+      }
+      break;
+
+    case CONFIG:
+      if (value == null) {
+        unsetConfig();
+      } else {
+        setConfig((String)value);
+      }
+      break;
+
+    case GUI:
+      if (value == null) {
+        unsetGui();
+      } else {
+        setGui((String)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case PARAGRAPH_ID:
+      return getParagraphId();
+
+    case PARAGRAPH_TITLE:
+      return getParagraphTitle();
+
+    case PARAGRAPH_TEXT:
+      return getParagraphText();
+
+    case CONFIG:
+      return getConfig();
+
+    case GUI:
+      return getGui();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case PARAGRAPH_ID:
+      return isSetParagraphId();
+    case PARAGRAPH_TITLE:
+      return isSetParagraphTitle();
+    case PARAGRAPH_TEXT:
+      return isSetParagraphText();
+    case CONFIG:
+      return isSetConfig();
+    case GUI:
+      return isSetGui();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof RemoteInterpreterContext)
+      return this.equals((RemoteInterpreterContext)that);
+    return false;
+  }
+
+  public boolean equals(RemoteInterpreterContext that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_paragraphId = true && this.isSetParagraphId();
+    boolean that_present_paragraphId = true && that.isSetParagraphId();
+    if (this_present_paragraphId || that_present_paragraphId) {
+      if (!(this_present_paragraphId && that_present_paragraphId))
+        return false;
+      if (!this.paragraphId.equals(that.paragraphId))
+        return false;
+    }
+
+    boolean this_present_paragraphTitle = true && this.isSetParagraphTitle();
+    boolean that_present_paragraphTitle = true && that.isSetParagraphTitle();
+    if (this_present_paragraphTitle || that_present_paragraphTitle) {
+      if (!(this_present_paragraphTitle && that_present_paragraphTitle))
+        return false;
+      if (!this.paragraphTitle.equals(that.paragraphTitle))
+        return false;
+    }
+
+    boolean this_present_paragraphText = true && this.isSetParagraphText();
+    boolean that_present_paragraphText = true && that.isSetParagraphText();
+    if (this_present_paragraphText || that_present_paragraphText) {
+      if (!(this_present_paragraphText && that_present_paragraphText))
+        return false;
+      if (!this.paragraphText.equals(that.paragraphText))
+        return false;
+    }
+
+    boolean this_present_config = true && this.isSetConfig();
+    boolean that_present_config = true && that.isSetConfig();
+    if (this_present_config || that_present_config) {
+      if (!(this_present_config && that_present_config))
+        return false;
+      if (!this.config.equals(that.config))
+        return false;
+    }
+
+    boolean this_present_gui = true && this.isSetGui();
+    boolean that_present_gui = true && that.isSetGui();
+    if (this_present_gui || that_present_gui) {
+      if (!(this_present_gui && that_present_gui))
+        return false;
+      if (!this.gui.equals(that.gui))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return 0;
+  }
+
+  public int compareTo(RemoteInterpreterContext other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    RemoteInterpreterContext typedOther = (RemoteInterpreterContext)other;
+
+    lastComparison = Boolean.valueOf(isSetParagraphId()).compareTo(typedOther.isSetParagraphId());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetParagraphId()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphId, typedOther.paragraphId);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetParagraphTitle()).compareTo(typedOther.isSetParagraphTitle());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetParagraphTitle()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphTitle, typedOther.paragraphTitle);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetParagraphText()).compareTo(typedOther.isSetParagraphText());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetParagraphText()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphText, typedOther.paragraphText);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetConfig()).compareTo(typedOther.isSetConfig());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetConfig()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.config, typedOther.config);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetGui()).compareTo(typedOther.isSetGui());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetGui()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.gui, typedOther.gui);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("RemoteInterpreterContext(");
+    boolean first = true;
+
+    sb.append("paragraphId:");
+    if (this.paragraphId == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.paragraphId);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("paragraphTitle:");
+    if (this.paragraphTitle == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.paragraphTitle);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("paragraphText:");
+    if (this.paragraphText == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.paragraphText);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("config:");
+    if (this.config == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.config);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("gui:");
+    if (this.gui == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.gui);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class RemoteInterpreterContextStandardSchemeFactory implements SchemeFactory {
+    public RemoteInterpreterContextStandardScheme getScheme() {
+      return new RemoteInterpreterContextStandardScheme();
+    }
+  }
+
+  private static class RemoteInterpreterContextStandardScheme extends StandardScheme<RemoteInterpreterContext> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteInterpreterContext struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // PARAGRAPH_ID
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.paragraphId = iprot.readString();
+              struct.setParagraphIdIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // PARAGRAPH_TITLE
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.paragraphTitle = iprot.readString();
+              struct.setParagraphTitleIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // PARAGRAPH_TEXT
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.paragraphText = iprot.readString();
+              struct.setParagraphTextIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 4: // CONFIG
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.config = iprot.readString();
+              struct.setConfigIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 5: // GUI
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.gui = iprot.readString();
+              struct.setGuiIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+
+      // check for required fields of primitive type, which can't be checked in the validate method
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, RemoteInterpreterContext struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.paragraphId != null) {
+        oprot.writeFieldBegin(PARAGRAPH_ID_FIELD_DESC);
+        oprot.writeString(struct.paragraphId);
+        oprot.writeFieldEnd();
+      }
+      if (struct.paragraphTitle != null) {
+        oprot.writeFieldBegin(PARAGRAPH_TITLE_FIELD_DESC);
+        oprot.writeString(struct.paragraphTitle);
+        oprot.writeFieldEnd();
+      }
+      if (struct.paragraphText != null) {
+        oprot.writeFieldBegin(PARAGRAPH_TEXT_FIELD_DESC);
+        oprot.writeString(struct.paragraphText);
+        oprot.writeFieldEnd();
+      }
+      if (struct.config != null) {
+        oprot.writeFieldBegin(CONFIG_FIELD_DESC);
+        oprot.writeString(struct.config);
+        oprot.writeFieldEnd();
+      }
+      if (struct.gui != null) {
+        oprot.writeFieldBegin(GUI_FIELD_DESC);
+        oprot.writeString(struct.gui);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class RemoteInterpreterContextTupleSchemeFactory implements SchemeFactory {
+    public RemoteInterpreterContextTupleScheme getScheme() {
+      return new RemoteInterpreterContextTupleScheme();
+    }
+  }
+
+  private static class RemoteInterpreterContextTupleScheme extends TupleScheme<RemoteInterpreterContext> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterContext struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.isSetParagraphId()) {
+        optionals.set(0);
+      }
+      if (struct.isSetParagraphTitle()) {
+        optionals.set(1);
+      }
+      if (struct.isSetParagraphText()) {
+        optionals.set(2);
+      }
+      if (struct.isSetConfig()) {
+        optionals.set(3);
+      }
+      if (struct.isSetGui()) {
+        optionals.set(4);
+      }
+      oprot.writeBitSet(optionals, 5);
+      if (struct.isSetParagraphId()) {
+        oprot.writeString(struct.paragraphId);
+      }
+      if (struct.isSetParagraphTitle()) {
+        oprot.writeString(struct.paragraphTitle);
+      }
+      if (struct.isSetParagraphText()) {
+        oprot.writeString(struct.paragraphText);
+      }
+      if (struct.isSetConfig()) {
+        oprot.writeString(struct.config);
+      }
+      if (struct.isSetGui()) {
+        oprot.writeString(struct.gui);
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterContext struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(5);
+      if (incoming.get(0)) {
+        struct.paragraphId = iprot.readString();
+        struct.setParagraphIdIsSet(true);
+      }
+      if (incoming.get(1)) {
+        struct.paragraphTitle = iprot.readString();
+        struct.setParagraphTitleIsSet(true);
+      }
+      if (incoming.get(2)) {
+        struct.paragraphText = iprot.readString();
+        struct.setParagraphTextIsSet(true);
+      }
+      if (incoming.get(3)) {
+        struct.config = iprot.readString();
+        struct.setConfigIsSet(true);
+      }
+      if (incoming.get(4)) {
+        struct.gui = iprot.readString();
+        struct.setGuiIsSet(true);
+      }
+    }
+  }
+
+}
+
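
A minimal sketch of building the generated struct above by hand, mirroring RemoteInterpreterServer.convert(): config and gui travel over thrift as JSON strings. All field values are hypothetical placeholders.

import java.util.HashMap;
import java.util.Map;

import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterContext;

import com.google.gson.Gson;

public class RemoteInterpreterContextSketch {
  public static void main(String[] args) {
    Gson gson = new Gson();
    Map<String, Object> config = new HashMap<String, Object>();   // hypothetical paragraph config
    config.put("enabled", true);

    RemoteInterpreterContext ric = new RemoteInterpreterContext(
        "paragraph_1",                 // hypothetical paragraph id
        "my paragraph",                // hypothetical title
        "println(\"hello\")",          // hypothetical paragraph text
        gson.toJson(config),           // config serialized to JSON
        "{}");                         // empty GUI serialized as JSON

    System.out.println(ric);           // uses the generated toString()
  }
}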

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterResult.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterResult.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterResult.java
new file mode 100644
index 0000000..5a68bd0
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterResult.java
@@ -0,0 +1,786 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.zeppelin.interpreter.thrift;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class RemoteInterpreterResult implements org.apache.thrift.TBase<RemoteInterpreterResult, RemoteInterpreterResult._Fields>, java.io.Serializable, Cloneable {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteInterpreterResult");
+
+  private static final org.apache.thrift.protocol.TField CODE_FIELD_DESC = new org.apache.thrift.protocol.TField("code", org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.STRING, (short)2);
+  private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)3);
+  private static final org.apache.thrift.protocol.TField CONFIG_FIELD_DESC = new org.apache.thrift.protocol.TField("config", org.apache.thrift.protocol.TType.STRING, (short)4);
+  private static final org.apache.thrift.protocol.TField GUI_FIELD_DESC = new org.apache.thrift.protocol.TField("gui", org.apache.thrift.protocol.TType.STRING, (short)5);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new RemoteInterpreterResultStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new RemoteInterpreterResultTupleSchemeFactory());
+  }
+
+  public String code; // required
+  public String type; // required
+  public String msg; // required
+  public String config; // required
+  public String gui; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    CODE((short)1, "code"),
+    TYPE((short)2, "type"),
+    MSG((short)3, "msg"),
+    CONFIG((short)4, "config"),
+    GUI((short)5, "gui");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // CODE
+          return CODE;
+        case 2: // TYPE
+          return TYPE;
+        case 3: // MSG
+          return MSG;
+        case 4: // CONFIG
+          return CONFIG;
+        case 5: // GUI
+          return GUI;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.CODE, new org.apache.thrift.meta_data.FieldMetaData("code", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.CONFIG, new org.apache.thrift.meta_data.FieldMetaData("config", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.GUI, new org.apache.thrift.meta_data.FieldMetaData("gui", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteInterpreterResult.class, metaDataMap);
+  }
+
+  public RemoteInterpreterResult() {
+  }
+
+  public RemoteInterpreterResult(
+    String code,
+    String type,
+    String msg,
+    String config,
+    String gui)
+  {
+    this();
+    this.code = code;
+    this.type = type;
+    this.msg = msg;
+    this.config = config;
+    this.gui = gui;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public RemoteInterpreterResult(RemoteInterpreterResult other) {
+    if (other.isSetCode()) {
+      this.code = other.code;
+    }
+    if (other.isSetType()) {
+      this.type = other.type;
+    }
+    if (other.isSetMsg()) {
+      this.msg = other.msg;
+    }
+    if (other.isSetConfig()) {
+      this.config = other.config;
+    }
+    if (other.isSetGui()) {
+      this.gui = other.gui;
+    }
+  }
+
+  public RemoteInterpreterResult deepCopy() {
+    return new RemoteInterpreterResult(this);
+  }
+
+  @Override
+  public void clear() {
+    this.code = null;
+    this.type = null;
+    this.msg = null;
+    this.config = null;
+    this.gui = null;
+  }
+
+  public String getCode() {
+    return this.code;
+  }
+
+  public RemoteInterpreterResult setCode(String code) {
+    this.code = code;
+    return this;
+  }
+
+  public void unsetCode() {
+    this.code = null;
+  }
+
+  /** Returns true if field code is set (has been assigned a value) and false otherwise */
+  public boolean isSetCode() {
+    return this.code != null;
+  }
+
+  public void setCodeIsSet(boolean value) {
+    if (!value) {
+      this.code = null;
+    }
+  }
+
+  public String getType() {
+    return this.type;
+  }
+
+  public RemoteInterpreterResult setType(String type) {
+    this.type = type;
+    return this;
+  }
+
+  public void unsetType() {
+    this.type = null;
+  }
+
+  /** Returns true if field type is set (has been assigned a value) and false otherwise */
+  public boolean isSetType() {
+    return this.type != null;
+  }
+
+  public void setTypeIsSet(boolean value) {
+    if (!value) {
+      this.type = null;
+    }
+  }
+
+  public String getMsg() {
+    return this.msg;
+  }
+
+  public RemoteInterpreterResult setMsg(String msg) {
+    this.msg = msg;
+    return this;
+  }
+
+  public void unsetMsg() {
+    this.msg = null;
+  }
+
+  /** Returns true if field msg is set (has been assigned a value) and false otherwise */
+  public boolean isSetMsg() {
+    return this.msg != null;
+  }
+
+  public void setMsgIsSet(boolean value) {
+    if (!value) {
+      this.msg = null;
+    }
+  }
+
+  public String getConfig() {
+    return this.config;
+  }
+
+  public RemoteInterpreterResult setConfig(String config) {
+    this.config = config;
+    return this;
+  }
+
+  public void unsetConfig() {
+    this.config = null;
+  }
+
+  /** Returns true if field config is set (has been assigned a value) and false otherwise */
+  public boolean isSetConfig() {
+    return this.config != null;
+  }
+
+  public void setConfigIsSet(boolean value) {
+    if (!value) {
+      this.config = null;
+    }
+  }
+
+  public String getGui() {
+    return this.gui;
+  }
+
+  public RemoteInterpreterResult setGui(String gui) {
+    this.gui = gui;
+    return this;
+  }
+
+  public void unsetGui() {
+    this.gui = null;
+  }
+
+  /** Returns true if field gui is set (has been assigned a value) and false otherwise */
+  public boolean isSetGui() {
+    return this.gui != null;
+  }
+
+  public void setGuiIsSet(boolean value) {
+    if (!value) {
+      this.gui = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case CODE:
+      if (value == null) {
+        unsetCode();
+      } else {
+        setCode((String)value);
+      }
+      break;
+
+    case TYPE:
+      if (value == null) {
+        unsetType();
+      } else {
+        setType((String)value);
+      }
+      break;
+
+    case MSG:
+      if (value == null) {
+        unsetMsg();
+      } else {
+        setMsg((String)value);
+      }
+      break;
+
+    case CONFIG:
+      if (value == null) {
+        unsetConfig();
+      } else {
+        setConfig((String)value);
+      }
+      break;
+
+    case GUI:
+      if (value == null) {
+        unsetGui();
+      } else {
+        setGui((String)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case CODE:
+      return getCode();
+
+    case TYPE:
+      return getType();
+
+    case MSG:
+      return getMsg();
+
+    case CONFIG:
+      return getConfig();
+
+    case GUI:
+      return getGui();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case CODE:
+      return isSetCode();
+    case TYPE:
+      return isSetType();
+    case MSG:
+      return isSetMsg();
+    case CONFIG:
+      return isSetConfig();
+    case GUI:
+      return isSetGui();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof RemoteInterpreterResult)
+      return this.equals((RemoteInterpreterResult)that);
+    return false;
+  }
+
+  public boolean equals(RemoteInterpreterResult that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_code = true && this.isSetCode();
+    boolean that_present_code = true && that.isSetCode();
+    if (this_present_code || that_present_code) {
+      if (!(this_present_code && that_present_code))
+        return false;
+      if (!this.code.equals(that.code))
+        return false;
+    }
+
+    boolean this_present_type = true && this.isSetType();
+    boolean that_present_type = true && that.isSetType();
+    if (this_present_type || that_present_type) {
+      if (!(this_present_type && that_present_type))
+        return false;
+      if (!this.type.equals(that.type))
+        return false;
+    }
+
+    boolean this_present_msg = true && this.isSetMsg();
+    boolean that_present_msg = true && that.isSetMsg();
+    if (this_present_msg || that_present_msg) {
+      if (!(this_present_msg && that_present_msg))
+        return false;
+      if (!this.msg.equals(that.msg))
+        return false;
+    }
+
+    boolean this_present_config = true && this.isSetConfig();
+    boolean that_present_config = true && that.isSetConfig();
+    if (this_present_config || that_present_config) {
+      if (!(this_present_config && that_present_config))
+        return false;
+      if (!this.config.equals(that.config))
+        return false;
+    }
+
+    boolean this_present_gui = true && this.isSetGui();
+    boolean that_present_gui = true && that.isSetGui();
+    if (this_present_gui || that_present_gui) {
+      if (!(this_present_gui && that_present_gui))
+        return false;
+      if (!this.gui.equals(that.gui))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return 0;
+  }
+
+  public int compareTo(RemoteInterpreterResult other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    RemoteInterpreterResult typedOther = (RemoteInterpreterResult)other;
+
+    lastComparison = Boolean.valueOf(isSetCode()).compareTo(typedOther.isSetCode());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetCode()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.code, typedOther.code);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetType()).compareTo(typedOther.isSetType());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetType()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, typedOther.type);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetMsg()).compareTo(typedOther.isSetMsg());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetMsg()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, typedOther.msg);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetConfig()).compareTo(typedOther.isSetConfig());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetConfig()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.config, typedOther.config);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetGui()).compareTo(typedOther.isSetGui());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetGui()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.gui, typedOther.gui);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("RemoteInterpreterResult(");
+    boolean first = true;
+
+    sb.append("code:");
+    if (this.code == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.code);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("type:");
+    if (this.type == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.type);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("msg:");
+    if (this.msg == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.msg);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("config:");
+    if (this.config == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.config);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("gui:");
+    if (this.gui == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.gui);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class RemoteInterpreterResultStandardSchemeFactory implements SchemeFactory {
+    public RemoteInterpreterResultStandardScheme getScheme() {
+      return new RemoteInterpreterResultStandardScheme();
+    }
+  }
+
+  private static class RemoteInterpreterResultStandardScheme extends StandardScheme<RemoteInterpreterResult> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // CODE
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.code = iprot.readString();
+              struct.setCodeIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // TYPE
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.type = iprot.readString();
+              struct.setTypeIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // MSG
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.msg = iprot.readString();
+              struct.setMsgIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 4: // CONFIG
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.config = iprot.readString();
+              struct.setConfigIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 5: // GUI
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.gui = iprot.readString();
+              struct.setGuiIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+
+      // check for required fields of primitive type, which can't be checked in the validate method
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.code != null) {
+        oprot.writeFieldBegin(CODE_FIELD_DESC);
+        oprot.writeString(struct.code);
+        oprot.writeFieldEnd();
+      }
+      if (struct.type != null) {
+        oprot.writeFieldBegin(TYPE_FIELD_DESC);
+        oprot.writeString(struct.type);
+        oprot.writeFieldEnd();
+      }
+      if (struct.msg != null) {
+        oprot.writeFieldBegin(MSG_FIELD_DESC);
+        oprot.writeString(struct.msg);
+        oprot.writeFieldEnd();
+      }
+      if (struct.config != null) {
+        oprot.writeFieldBegin(CONFIG_FIELD_DESC);
+        oprot.writeString(struct.config);
+        oprot.writeFieldEnd();
+      }
+      if (struct.gui != null) {
+        oprot.writeFieldBegin(GUI_FIELD_DESC);
+        oprot.writeString(struct.gui);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class RemoteInterpreterResultTupleSchemeFactory implements SchemeFactory {
+    public RemoteInterpreterResultTupleScheme getScheme() {
+      return new RemoteInterpreterResultTupleScheme();
+    }
+  }
+
+  private static class RemoteInterpreterResultTupleScheme extends TupleScheme<RemoteInterpreterResult> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.isSetCode()) {
+        optionals.set(0);
+      }
+      if (struct.isSetType()) {
+        optionals.set(1);
+      }
+      if (struct.isSetMsg()) {
+        optionals.set(2);
+      }
+      if (struct.isSetConfig()) {
+        optionals.set(3);
+      }
+      if (struct.isSetGui()) {
+        optionals.set(4);
+      }
+      oprot.writeBitSet(optionals, 5);
+      if (struct.isSetCode()) {
+        oprot.writeString(struct.code);
+      }
+      if (struct.isSetType()) {
+        oprot.writeString(struct.type);
+      }
+      if (struct.isSetMsg()) {
+        oprot.writeString(struct.msg);
+      }
+      if (struct.isSetConfig()) {
+        oprot.writeString(struct.config);
+      }
+      if (struct.isSetGui()) {
+        oprot.writeString(struct.gui);
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(5);
+      if (incoming.get(0)) {
+        struct.code = iprot.readString();
+        struct.setCodeIsSet(true);
+      }
+      if (incoming.get(1)) {
+        struct.type = iprot.readString();
+        struct.setTypeIsSet(true);
+      }
+      if (incoming.get(2)) {
+        struct.msg = iprot.readString();
+        struct.setMsgIsSet(true);
+      }
+      if (incoming.get(3)) {
+        struct.config = iprot.readString();
+        struct.setConfigIsSet(true);
+      }
+      if (incoming.get(4)) {
+        struct.gui = iprot.readString();
+        struct.setGuiIsSet(true);
+      }
+    }
+  }
+
+}
+
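
For orientation, here is a minimal round-trip sketch of the generated struct above (illustrative only, not part of this patch). It exercises the same TCompactProtocol path that writeObject()/readObject() use. It assumes the generated no-argument constructor and setters, and that the snippet sits in the same package as RemoteInterpreterResult (otherwise add its import); the class name RemoteInterpreterResultRoundTrip is hypothetical.

    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TException;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.protocol.TCompactProtocol;

    public class RemoteInterpreterResultRoundTrip {
      public static void main(String[] args) throws TException {
        // Populate the struct through the generated setters.
        RemoteInterpreterResult original = new RemoteInterpreterResult();
        original.setCode("SUCCESS");
        original.setType("TEXT");
        original.setMsg("hello from the remote interpreter");
        original.setConfig("{}");
        original.setGui("{}");

        // Serialize and deserialize with the compact protocol, the same
        // protocol used by the writeObject()/readObject() methods above.
        TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());
        byte[] bytes = serializer.serialize(original);

        RemoteInterpreterResult copy = new RemoteInterpreterResult();
        new TDeserializer(new TCompactProtocol.Factory()).deserialize(copy, bytes);

        // equals() compares field by field, so a faithful round trip prints "true".
        System.out.println(original.equals(copy));
      }
    }

The tuple scheme is selected automatically when both ends speak TTupleProtocol; with any other protocol the standard scheme above is used.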


[15/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
new file mode 100644
index 0000000..9fce093
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
@@ -0,0 +1,422 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import java.io.BufferedWriter;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.PipedInputStream;
+import java.io.PipedOutputStream;
+import java.net.ServerSocket;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.compress.utils.IOUtils;
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.exec.DefaultExecutor;
+import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.exec.ExecuteResultHandler;
+import org.apache.commons.exec.ExecuteWatchdog;
+import org.apache.commons.exec.PumpStreamHandler;
+import org.apache.commons.exec.environment.EnvironmentUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SQLContext;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
+import org.apache.zeppelin.interpreter.WrappedInterpreter;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import py4j.GatewayServer;
+
+/**
+ * PySpark interpreter for Zeppelin. Starts a separate python process running
+ * zeppelin_pyspark.py and exchanges statements and results with it over py4j.
+ */
+public class PySparkInterpreter extends Interpreter implements ExecuteResultHandler {
+  Logger logger = LoggerFactory.getLogger(PySparkInterpreter.class);
+  private GatewayServer gatewayServer;
+  private DefaultExecutor executor;
+  private int port;
+  private ByteArrayOutputStream outputStream;
+  private ByteArrayOutputStream errStream;
+  private BufferedWriter ins;
+  private PipedInputStream in;
+  private ByteArrayOutputStream input;
+  private String scriptPath;
+  boolean pythonscriptRunning = false;
+
+  static {
+    Interpreter.register(
+        "pyspark",
+        "spark",
+        PySparkInterpreter.class.getName(),
+        new InterpreterPropertyBuilder()
+          .add("spark.home",
+               SparkInterpreter.getSystemDefault("SPARK_HOME", "spark.home", ""),
+               "Spark home path. Should be provided for pyspark")
+          .add("zeppelin.pyspark.python",
+               SparkInterpreter.getSystemDefault("PYSPARK_PYTHON", null, "python"),
+               "Python command to run pyspark with").build());
+  }
+
+  public PySparkInterpreter(Properties property) {
+    super(property);
+
+    scriptPath = System.getProperty("java.io.tmpdir") + "/zeppelin_pyspark.py";
+  }
+
+  private String getSparkHome() {
+    String sparkHome = getProperty("spark.home");
+    if (sparkHome == null) {
+      throw new InterpreterException("spark.home is undefined");
+    } else {
+      return sparkHome;
+    }
+  }
+
+
+  private void createPythonScript() {
+    ClassLoader classLoader = getClass().getClassLoader();
+    File out = new File(scriptPath);
+
+    if (out.exists() && out.isDirectory()) {
+      throw new InterpreterException("Can't create python script " + out.getAbsolutePath());
+    }
+
+    try {
+      FileOutputStream outStream = new FileOutputStream(out);
+      IOUtils.copy(
+          classLoader.getResourceAsStream("python/zeppelin_pyspark.py"),
+          outStream);
+      outStream.close();
+    } catch (IOException e) {
+      throw new InterpreterException(e);
+    }
+
+    logger.info("File {} created", scriptPath);
+  }
+
+  @Override
+  public void open() {
+    // create python script
+    createPythonScript();
+
+    port = findRandomOpenPortOnAllLocalInterfaces();
+
+    gatewayServer = new GatewayServer(this, port);
+    gatewayServer.start();
+
+    // Run python shell
+    CommandLine cmd = CommandLine.parse(getProperty("zeppelin.pyspark.python"));
+    cmd.addArgument(scriptPath, false);
+    cmd.addArgument(Integer.toString(port), false);
+    executor = new DefaultExecutor();
+    outputStream = new ByteArrayOutputStream();
+    PipedOutputStream ps = new PipedOutputStream();
+    in = null;
+    try {
+      in = new PipedInputStream(ps);
+    } catch (IOException e1) {
+      throw new InterpreterException(e1);
+    }
+    ins = new BufferedWriter(new OutputStreamWriter(ps));
+
+    input = new ByteArrayOutputStream();
+
+    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
+    executor.setStreamHandler(streamHandler);
+    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
+
+
+    try {
+      Map env = EnvironmentUtils.getProcEnvironment();
+
+      String pythonPath = (String) env.get("PYTHONPATH");
+      if (pythonPath == null) {
+        pythonPath = "";
+      } else {
+        pythonPath += ":";
+      }
+
+      pythonPath += getSparkHome() + "/python/lib/py4j-0.8.2.1-src.zip:"
+          + getSparkHome() + "/python";
+
+      env.put("PYTHONPATH", pythonPath);
+
+      executor.execute(cmd, env, this);
+      pythonscriptRunning = true;
+    } catch (IOException e) {
+      throw new InterpreterException(e);
+    }
+
+
+    try {
+      ins.write("import sys, getopt\n");
+      ins.flush();
+    } catch (IOException e) {
+      throw new InterpreterException(e);
+    }
+  }
+
+  private int findRandomOpenPortOnAllLocalInterfaces() {
+    // Bind an ephemeral port just to learn its number; try-with-resources
+    // closes the socket again so the py4j GatewayServer can bind the same
+    // port immediately afterwards.
+    try (ServerSocket socket = new ServerSocket(0)) {
+      return socket.getLocalPort();
+    } catch (IOException e) {
+      throw new InterpreterException(e);
+    }
+  }
+
+  @Override
+  public void close() {
+    executor.getWatchdog().destroyProcess();
+    gatewayServer.shutdown();
+  }
+
+  PythonInterpretRequest pythonInterpretRequest = null;
+
+  /**
+   * A single interpret request handed to the python side: the statements to
+   * run and the Spark job group to run them under.
+   */
+  public class PythonInterpretRequest {
+    public String statements;
+    public String jobGroup;
+
+    public PythonInterpretRequest(String statements, String jobGroup) {
+      this.statements = statements;
+      this.jobGroup = jobGroup;
+    }
+
+    public String statements() {
+      return statements;
+    }
+
+    public String jobGroup() {
+      return jobGroup;
+    }
+  }
+
+  Integer statementSetNotifier = new Integer(0);
+
+  public PythonInterpretRequest getStatements() {
+    synchronized (statementSetNotifier) {
+      while (pythonInterpretRequest == null) {
+        try {
+          statementSetNotifier.wait(1000);
+        } catch (InterruptedException e) {
+        }
+      }
+      PythonInterpretRequest req = pythonInterpretRequest;
+      pythonInterpretRequest = null;
+      return req;
+    }
+  }
+
+  String statementOutput = null;
+  boolean statementError = false;
+  Integer statementFinishedNotifier = new Integer(0);
+
+  public void setStatementsFinished(String out, boolean error) {
+    synchronized (statementFinishedNotifier) {
+      statementOutput = out;
+      statementError = error;
+      statementFinishedNotifier.notify();
+    }
+
+  }
+
+  boolean pythonScriptInitialized = false;
+  Integer pythonScriptInitializeNotifier = new Integer(0);
+
+  public void onPythonScriptInitialized() {
+    synchronized (pythonScriptInitializeNotifier) {
+      pythonScriptInitialized = true;
+      pythonScriptInitializeNotifier.notifyAll();
+    }
+  }
+
+  @Override
+  public InterpreterResult interpret(String st, InterpreterContext context) {
+    if (!pythonscriptRunning) {
+      return new InterpreterResult(Code.ERROR, "python process not running"
+          + outputStream.toString());
+    }
+
+    outputStream.reset();
+
+    synchronized (pythonScriptInitializeNotifier) {
+      long startTime = System.currentTimeMillis();
+      while (pythonScriptInitialized == false
+          && pythonscriptRunning
+          && System.currentTimeMillis() - startTime < 10 * 1000) {
+        try {
+          pythonScriptInitializeNotifier.wait(1000);
+        } catch (InterruptedException e) {
+        }
+      }
+    }
+
+    if (pythonscriptRunning == false) {
+      // python script failed to initialize and terminated
+      return new InterpreterResult(Code.ERROR, "failed to start pyspark"
+          + outputStream.toString());
+    }
+    if (pythonScriptInitialized == false) {
+      // timeout. didn't get initialized message
+      return new InterpreterResult(Code.ERROR, "pyspark is not responding "
+          + outputStream.toString());
+    }
+
+    SparkInterpreter sparkInterpreter = getSparkInterpreter();
+    if (!sparkInterpreter.getSparkContext().version().startsWith("1.2") &&
+        !sparkInterpreter.getSparkContext().version().startsWith("1.3")) {
+      return new InterpreterResult(Code.ERROR, "pyspark "
+          + sparkInterpreter.getSparkContext().version() + " is not supported");
+    }
+    String jobGroup = sparkInterpreter.getJobGroup(context);
+    ZeppelinContext z = sparkInterpreter.getZeppelinContext();
+    z.setInterpreterContext(context);
+    z.setGui(context.getGui());
+    pythonInterpretRequest = new PythonInterpretRequest(st, jobGroup);
+    statementOutput = null;
+
+    synchronized (statementSetNotifier) {
+      statementSetNotifier.notify();
+    }
+
+    synchronized (statementFinishedNotifier) {
+      while (statementOutput == null) {
+        try {
+          statementFinishedNotifier.wait(1000);
+        } catch (InterruptedException e) {
+        }
+      }
+    }
+
+    if (statementError) {
+      return new InterpreterResult(Code.ERROR, statementOutput);
+    } else {
+      return new InterpreterResult(Code.SUCCESS, statementOutput);
+    }
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {
+    SparkInterpreter sparkInterpreter = getSparkInterpreter();
+    sparkInterpreter.cancel(context);
+  }
+
+  @Override
+  public FormType getFormType() {
+    return FormType.NATIVE;
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    SparkInterpreter sparkInterpreter = getSparkInterpreter();
+    return sparkInterpreter.getProgress(context);
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    // not supported
+    return new LinkedList<String>();
+  }
+
+  private SparkInterpreter getSparkInterpreter() {
+    InterpreterGroup intpGroup = getInterpreterGroup();
+    synchronized (intpGroup) {
+      for (Interpreter intp : getInterpreterGroup()){
+        if (intp.getClassName().equals(SparkInterpreter.class.getName())) {
+          Interpreter p = intp;
+          while (p instanceof WrappedInterpreter) {
+            if (p instanceof LazyOpenInterpreter) {
+              ((LazyOpenInterpreter) p).open();
+            }
+            p = ((WrappedInterpreter) p).getInnerInterpreter();
+          }
+          return (SparkInterpreter) p;
+        }
+      }
+    }
+    return null;
+  }
+
+  public ZeppelinContext getZeppelinContext() {
+    SparkInterpreter sparkIntp = getSparkInterpreter();
+    if (sparkIntp != null) {
+      return getSparkInterpreter().getZeppelinContext();
+    } else {
+      return null;
+    }
+  }
+
+  public JavaSparkContext getJavaSparkContext() {
+    SparkInterpreter intp = getSparkInterpreter();
+    if (intp == null) {
+      return null;
+    } else {
+      return new JavaSparkContext(intp.getSparkContext());
+    }
+  }
+
+  public SparkConf getSparkConf() {
+    JavaSparkContext sc = getJavaSparkContext();
+    if (sc == null) {
+      return null;
+    } else {
+      return getJavaSparkContext().getConf();
+    }
+  }
+
+  public SQLContext getSQLContext() {
+    SparkInterpreter intp = getSparkInterpreter();
+    if (intp == null) {
+      return null;
+    } else {
+      return intp.getSQLContext();
+    }
+  }
+
+
+  @Override
+  public void onProcessComplete(int exitValue) {
+    pythonscriptRunning = false;
+    logger.info("python process terminated. exit code " + exitValue);
+  }
+
+  @Override
+  public void onProcessFailed(ExecuteException e) {
+    pythonscriptRunning = false;
+    logger.error("python process failed", e);
+  }
+}
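
The core of this interpreter is the handoff between interpret(), getStatements() and setStatementsFinished(): interpret() publishes a PythonInterpretRequest and blocks, the python process pulls it over py4j by calling getStatements(), executes it, and reports back through setStatementsFinished(), which wakes the waiting interpret(). Below is a minimal, self-contained sketch of that wait/notify handoff (illustrative only, not part of this patch); the class HandoffSketch and its method names are hypothetical, and plain Strings stand in for the real request and result types.

    public class HandoffSketch {
      private String request;   // stands in for pythonInterpretRequest
      private String result;    // stands in for statementOutput

      private final Object requestLock = new Object();
      private final Object resultLock = new Object();

      // Mirrors interpret(): publish the request, then wait for a result.
      public String interpret(String statements) throws InterruptedException {
        synchronized (requestLock) {
          request = statements;
          requestLock.notify();
        }
        synchronized (resultLock) {
          while (result == null) {
            resultLock.wait(1000);
          }
          String r = result;
          result = null;
          return r;
        }
      }

      // Mirrors getStatements(): called from the "python" side, blocks until a request exists.
      public String takeRequest() throws InterruptedException {
        synchronized (requestLock) {
          while (request == null) {
            requestLock.wait(1000);
          }
          String r = request;
          request = null;
          return r;
        }
      }

      // Mirrors setStatementsFinished(): publish the result and wake the waiting caller.
      public void finish(String output) {
        synchronized (resultLock) {
          result = output;
          resultLock.notify();
        }
      }

      public static void main(String[] args) throws Exception {
        final HandoffSketch sketch = new HandoffSketch();
        Thread pythonSide = new Thread(new Runnable() {
          public void run() {
            try {
              sketch.finish("echo: " + sketch.takeRequest());
            } catch (InterruptedException ignored) {
              // ignore for the sketch
            }
          }
        });
        pythonSide.start();
        System.out.println(sketch.interpret("print(1 + 1)"));  // prints "echo: print(1 + 1)"
      }
    }

In the real interpreter the string handed to finish() is what eventually becomes the body of the returned InterpreterResult.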

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
new file mode 100644
index 0000000..71c5ab5
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -0,0 +1,741 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.spark.HttpServer;
+import org.apache.spark.SparkConf;
+import org.apache.spark.SparkContext;
+import org.apache.spark.SparkEnv;
+import org.apache.spark.repl.SparkCommandLine;
+import org.apache.spark.repl.SparkILoop;
+import org.apache.spark.repl.SparkIMain;
+import org.apache.spark.repl.SparkJLineCompletion;
+import org.apache.spark.scheduler.ActiveJob;
+import org.apache.spark.scheduler.DAGScheduler;
+import org.apache.spark.scheduler.Pool;
+import org.apache.spark.scheduler.Stage;
+import org.apache.spark.sql.SQLContext;
+import org.apache.spark.ui.jobs.JobProgressListener;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.interpreter.InterpreterUtils;
+import org.apache.zeppelin.interpreter.WrappedInterpreter;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.apache.zeppelin.spark.dep.DependencyContext;
+import org.apache.zeppelin.spark.dep.DependencyResolver;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import scala.Console;
+import scala.Enumeration.Value;
+import scala.None;
+import scala.Some;
+import scala.Tuple2;
+import scala.collection.Iterator;
+import scala.collection.JavaConversions;
+import scala.collection.JavaConverters;
+import scala.collection.mutable.HashMap;
+import scala.collection.mutable.HashSet;
+import scala.tools.nsc.Settings;
+import scala.tools.nsc.interpreter.Completion.Candidates;
+import scala.tools.nsc.interpreter.Completion.ScalaCompleter;
+import scala.tools.nsc.settings.MutableSettings.BooleanSetting;
+import scala.tools.nsc.settings.MutableSettings.PathSetting;
+
+/**
+ * Spark interpreter for Zeppelin.
+ *
+ */
+public class SparkInterpreter extends Interpreter {
+  Logger logger = LoggerFactory.getLogger(SparkInterpreter.class);
+
+  static {
+    Interpreter.register(
+        "spark",
+        "spark",
+        SparkInterpreter.class.getName(),
+        new InterpreterPropertyBuilder()
+            .add("spark.app.name", "Zeppelin", "The name of spark application.")
+            .add("master",
+                getSystemDefault("MASTER", "spark.master", "local[*]"),
+                "Spark master uri. ex) spark://masterhost:7077")
+            .add("spark.executor.memory",
+                getSystemDefault(null, "spark.executor.memory", "512m"),
+                "Executor memory per worker instance. ex) 512m, 32g")
+            .add("spark.cores.max",
+                getSystemDefault(null, "spark.cores.max", ""),
+                "Total number of cores to use. Empty value uses all available cores.")
+            .add("spark.yarn.jar",
+                getSystemDefault("SPARK_YARN_JAR", "spark.yarn.jar", ""),
+                "The location of the Spark jar file. If you run Spark on YARN, "
+                + "this value should be set")
+            .add("zeppelin.spark.useHiveContext", "true",
+                 "Use HiveContext instead of SQLContext if it is true.")
+            .add("args", "", "spark commandline args").build());
+
+  }
+
+  private ZeppelinContext z;
+  private SparkILoop interpreter;
+  private SparkIMain intp;
+  private SparkContext sc;
+  private ByteArrayOutputStream out;
+  private SQLContext sqlc;
+  private DependencyResolver dep;
+  private SparkJLineCompletion completor;
+
+  private JobProgressListener sparkListener;
+
+  private Map<String, Object> binder;
+  private SparkEnv env;
+
+
+  public SparkInterpreter(Properties property) {
+    super(property);
+    out = new ByteArrayOutputStream();
+  }
+
+  public SparkInterpreter(Properties property, SparkContext sc) {
+    this(property);
+
+    this.sc = sc;
+    env = SparkEnv.get();
+    sparkListener = setupListeners(this.sc);
+  }
+
+  public synchronized SparkContext getSparkContext() {
+    if (sc == null) {
+      sc = createSparkContext();
+      env = SparkEnv.get();
+      sparkListener = setupListeners(sc);
+    }
+    return sc;
+  }
+
+  public boolean isSparkContextInitialized() {
+    return sc != null;
+  }
+
+  private static JobProgressListener setupListeners(SparkContext context) {
+    JobProgressListener pl = new JobProgressListener(context.getConf());
+    context.listenerBus().addListener(pl);
+    return pl;
+  }
+
+  private boolean useHiveContext() {
+    return Boolean.parseBoolean(getProperty("zeppelin.spark.useHiveContext"));
+  }
+
+  public SQLContext getSQLContext() {
+    if (sqlc == null) {
+      if (useHiveContext()) {
+        String name = "org.apache.spark.sql.hive.HiveContext";
+        Constructor<?> hc;
+        try {
+          hc = getClass().getClassLoader().loadClass(name)
+              .getConstructor(SparkContext.class);
+          sqlc = (SQLContext) hc.newInstance(getSparkContext());
+        } catch (NoSuchMethodException | SecurityException
+            | ClassNotFoundException | InstantiationException
+            | IllegalAccessException | IllegalArgumentException
+            | InvocationTargetException e) {
+          logger.warn("Can't create HiveContext. Fallback to SQLContext", e);
+          // when hive dependency is not loaded, it'll fail.
+          // in this case SQLContext can be used.
+          sqlc = new SQLContext(getSparkContext());
+        }
+      } else {
+        sqlc = new SQLContext(getSparkContext());
+      }
+    }
+
+    return sqlc;
+  }
+
+  public DependencyResolver getDependencyResolver() {
+    if (dep == null) {
+      dep = new DependencyResolver(intp, sc, getProperty("zeppelin.dep.localrepo"));
+    }
+    return dep;
+  }
+
+  private DepInterpreter getDepInterpreter() {
+    InterpreterGroup intpGroup = getInterpreterGroup();
+    if (intpGroup == null) return null;
+    synchronized (intpGroup) {
+      for (Interpreter intp : intpGroup) {
+        if (intp.getClassName().equals(DepInterpreter.class.getName())) {
+          Interpreter p = intp;
+          while (p instanceof WrappedInterpreter) {
+            p = ((WrappedInterpreter) p).getInnerInterpreter();
+          }
+          return (DepInterpreter) p;
+        }
+      }
+    }
+    return null;
+  }
+
+  public SparkContext createSparkContext() {
+    System.err.println("------ Create new SparkContext " + getProperty("master") + " -------");
+
+    String execUri = System.getenv("SPARK_EXECUTOR_URI");
+    String[] jars = SparkILoop.getAddedJars();
+
+    String classServerUri = null;
+
+    try { // in case of spark 1.1x, spark 1.2x
+      Method classServer = interpreter.intp().getClass().getMethod("classServer");
+      HttpServer httpServer = (HttpServer) classServer.invoke(interpreter.intp());
+      classServerUri = httpServer.uri();
+    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
+        | IllegalArgumentException | InvocationTargetException e) {
+      // continue
+    }
+
+    if (classServerUri == null) {
+      try { // for spark 1.3x
+        Method classServer = interpreter.intp().getClass().getMethod("classServerUri");
+        classServerUri = (String) classServer.invoke(interpreter.intp());
+      } catch (NoSuchMethodException | SecurityException | IllegalAccessException
+          | IllegalArgumentException | InvocationTargetException e) {
+        throw new InterpreterException(e);
+      }
+    }
+
+    SparkConf conf =
+        new SparkConf()
+            .setMaster(getProperty("master"))
+            .setAppName(getProperty("spark.app.name"))
+            .setJars(jars)
+            .set("spark.repl.class.uri", classServerUri);
+
+    if (execUri != null) {
+      conf.set("spark.executor.uri", execUri);
+    }
+    if (System.getenv("SPARK_HOME") != null) {
+      conf.setSparkHome(System.getenv("SPARK_HOME"));
+    }
+    conf.set("spark.scheduler.mode", "FAIR");
+
+    Properties intpProperty = getProperty();
+
+    for (Object k : intpProperty.keySet()) {
+      String key = (String) k;
+      Object value = intpProperty.get(key);
+      if (!isEmptyString(value)) {
+        logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, value));
+        conf.set(key, (String) value);
+      }
+    }
+
+    SparkContext sparkContext = new SparkContext(conf);
+    return sparkContext;
+  }
+
+  public static boolean isEmptyString(Object val) {
+    return val instanceof String && ((String) val).trim().isEmpty();
+  }
+
+  public static String getSystemDefault(
+      String envName,
+      String propertyName,
+      String defaultValue) {
+
+    if (envName != null && !envName.isEmpty()) {
+      String envValue = System.getenv().get(envName);
+      if (envValue != null) {
+        return envValue;
+      }
+    }
+
+    if (propertyName != null && !propertyName.isEmpty()) {
+      String propValue = System.getProperty(propertyName);
+      if (propValue != null) {
+        return propValue;
+      }
+    }
+    return defaultValue;
+  }
+
+  @Override
+  public void open() {
+    URL[] urls = getClassloaderUrls();
+
+    // Very nice discussion about how the scala compiler handles the classpath:
+    // https://groups.google.com/forum/#!topic/scala-user/MlVwo2xCCI0
+
+    /*
+     * > val env = new nsc.Settings(errLogger) > env.usejavacp.value = true > val p = new
+     * Interpreter(env) > p.setContextClassLoader > Alternatively you can set the class path through
+     * nsc.Settings.classpath.
+     *
+     * >> val settings = new Settings() >> settings.usejavacp.value = true >>
+     * settings.classpath.value += File.pathSeparator + >> System.getProperty("java.class.path") >>
+     * val in = new Interpreter(settings) { >> override protected def parentClassLoader =
+     * getClass.getClassLoader >> } >> in.setContextClassLoader()
+     */
+    Settings settings = new Settings();
+    if (getProperty("args") != null) {
+      String[] argsArray = getProperty("args").split(" ");
+      LinkedList<String> argList = new LinkedList<String>();
+      for (String arg : argsArray) {
+        argList.add(arg);
+      }
+
+      SparkCommandLine command =
+          new SparkCommandLine(scala.collection.JavaConversions.asScalaBuffer(
+              argList).toList());
+      settings = command.settings();
+    }
+
+    // set classpath for scala compiler
+    PathSetting pathSettings = settings.classpath();
+    String classpath = "";
+    List<File> paths = currentClassPath();
+    for (File f : paths) {
+      if (classpath.length() > 0) {
+        classpath += File.pathSeparator;
+      }
+      classpath += f.getAbsolutePath();
+    }
+
+    if (urls != null) {
+      for (URL u : urls) {
+        if (classpath.length() > 0) {
+          classpath += File.pathSeparator;
+        }
+        classpath += u.getFile();
+      }
+    }
+
+    // add dependency from DepInterpreter
+    DepInterpreter depInterpreter = getDepInterpreter();
+    if (depInterpreter != null) {
+      DependencyContext depc = depInterpreter.getDependencyContext();
+      if (depc != null) {
+        List<File> files = depc.getFiles();
+        if (files != null) {
+          for (File f : files) {
+            if (classpath.length() > 0) {
+              classpath += File.pathSeparator;
+            }
+            classpath += f.getAbsolutePath();
+          }
+        }
+      }
+    }
+
+    pathSettings.v_$eq(classpath);
+    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);
+
+
+    // set classloader for scala compiler
+    settings.explicitParentLoader_$eq(new Some<ClassLoader>(Thread.currentThread()
+        .getContextClassLoader()));
+    BooleanSetting b = (BooleanSetting) settings.usejavacp();
+    b.v_$eq(true);
+    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);
+
+    PrintStream printStream = new PrintStream(out);
+
+    /* spark interpreter */
+    this.interpreter = new SparkILoop(null, new PrintWriter(out));
+    interpreter.settings_$eq(settings);
+
+    interpreter.createInterpreter();
+
+    intp = interpreter.intp();
+    intp.setContextClassLoader();
+    intp.initializeSynchronous();
+
+    completor = new SparkJLineCompletion(intp);
+
+    sc = getSparkContext();
+    if (sc.getPoolForName("fair").isEmpty()) {
+      Value schedulingMode = org.apache.spark.scheduler.SchedulingMode.FAIR();
+      int minimumShare = 0;
+      int weight = 1;
+      Pool pool = new Pool("fair", schedulingMode, minimumShare, weight);
+      sc.taskScheduler().rootPool().addSchedulable(pool);
+    }
+
+    sqlc = getSQLContext();
+
+    dep = getDependencyResolver();
+
+    z = new ZeppelinContext(sc, sqlc, null, dep, printStream);
+
+    try {
+      if (sc.version().startsWith("1.1") || sc.version().startsWith("1.2")) {
+        Method loadFiles = this.interpreter.getClass().getMethod("loadFiles", Settings.class);
+        loadFiles.invoke(this.interpreter, settings);
+      } else if (sc.version().startsWith("1.3")) {
+        Method loadFiles = this.interpreter.getClass().getMethod(
+            "org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
+        loadFiles.invoke(this.interpreter, settings);
+      }
+    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
+        | IllegalArgumentException | InvocationTargetException e) {
+      throw new InterpreterException(e);
+    }
+
+
+    intp.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
+    binder = (Map<String, Object>) getValue("_binder");
+    binder.put("sc", sc);
+    binder.put("sqlc", sqlc);
+    binder.put("z", z);
+    binder.put("out", printStream);
+
+    intp.interpret("@transient val z = "
+                 + "_binder.get(\"z\").asInstanceOf[org.apache.zeppelin.spark.ZeppelinContext]");
+    intp.interpret("@transient val sc = "
+                 + "_binder.get(\"sc\").asInstanceOf[org.apache.spark.SparkContext]");
+    intp.interpret("@transient val sqlc = "
+                 + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
+    intp.interpret("@transient val sqlContext = "
+                 + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
+    intp.interpret("import org.apache.spark.SparkContext._");
+
+    if (sc.version().startsWith("1.1")) {
+      intp.interpret("import sqlContext._");
+    } else if (sc.version().startsWith("1.2")) {
+      intp.interpret("import sqlContext._");
+    } else if (sc.version().startsWith("1.3")) {
+      intp.interpret("import sqlContext.implicits._");
+      intp.interpret("import sqlContext.sql");
+      intp.interpret("import org.apache.spark.sql.functions._");
+    }
+
+    // add jar
+    if (depInterpreter != null) {
+      DependencyContext depc = depInterpreter.getDependencyContext();
+      if (depc != null) {
+        List<File> files = depc.getFilesDist();
+        if (files != null) {
+          for (File f : files) {
+            if (f.getName().toLowerCase().endsWith(".jar")) {
+              sc.addJar(f.getAbsolutePath());
+              logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
+            } else {
+              sc.addFile(f.getAbsolutePath());
+              logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
+            }
+          }
+        }
+      }
+    }
+  }
+
+  private List<File> currentClassPath() {
+    List<File> paths = classPath(Thread.currentThread().getContextClassLoader());
+    String[] cps = System.getProperty("java.class.path").split(File.pathSeparator);
+    if (cps != null) {
+      for (String cp : cps) {
+        paths.add(new File(cp));
+      }
+    }
+    return paths;
+  }
+
+  private List<File> classPath(ClassLoader cl) {
+    List<File> paths = new LinkedList<File>();
+    if (cl == null) {
+      return paths;
+    }
+
+    if (cl instanceof URLClassLoader) {
+      URLClassLoader ucl = (URLClassLoader) cl;
+      URL[] urls = ucl.getURLs();
+      if (urls != null) {
+        for (URL url : urls) {
+          paths.add(new File(url.getFile()));
+        }
+      }
+    }
+    return paths;
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    ScalaCompleter c = completor.completer();
+    Candidates ret = c.complete(buf, cursor);
+    return scala.collection.JavaConversions.asJavaList(ret.candidates());
+  }
+
+  public Object getValue(String name) {
+    Object ret = intp.valueOfTerm(name);
+    if (ret instanceof None) {
+      return null;
+    } else if (ret instanceof Some) {
+      return ((Some) ret).get();
+    } else {
+      return ret;
+    }
+  }
+
+  String getJobGroup(InterpreterContext context){
+    return "zeppelin-" + this.hashCode() + "-" + context.getParagraphId();
+  }
+
+  /**
+   * Interpret a single line.
+   */
+  @Override
+  public InterpreterResult interpret(String line, InterpreterContext context) {
+    z.setInterpreterContext(context);
+    if (line == null || line.trim().length() == 0) {
+      return new InterpreterResult(Code.SUCCESS);
+    }
+    return interpret(line.split("\n"), context);
+  }
+
+  public InterpreterResult interpret(String[] lines, InterpreterContext context) {
+    synchronized (this) {
+      z.setGui(context.getGui());
+      sc.setJobGroup(getJobGroup(context), "Zeppelin", false);
+      InterpreterResult r = interpretInput(lines);
+      sc.clearJobGroup();
+      return r;
+    }
+  }
+
+  public InterpreterResult interpretInput(String[] lines) {
+    SparkEnv.set(env);
+
+    // add print("") to make sure the snippet does not end with a comment
+    // see https://github.com/NFLabs/zeppelin/issues/151
+    String[] linesToRun = new String[lines.length + 1];
+    for (int i = 0; i < lines.length; i++) {
+      linesToRun[i] = lines[i];
+    }
+    linesToRun[lines.length] = "print(\"\")";
+
+    Console.setOut((java.io.PrintStream) binder.get("out"));
+    out.reset();
+    Code r = null;
+    String incomplete = "";
+    for (String s : linesToRun) {
+      scala.tools.nsc.interpreter.Results.Result res = null;
+      try {
+        res = intp.interpret(incomplete + s);
+      } catch (Exception e) {
+        sc.clearJobGroup();
+        logger.info("Interpreter exception", e);
+        return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
+      }
+
+      r = getResultCode(res);
+
+      if (r == Code.ERROR) {
+        sc.clearJobGroup();
+        return new InterpreterResult(r, out.toString());
+      } else if (r == Code.INCOMPLETE) {
+        incomplete += s + "\n";
+      } else {
+        incomplete = "";
+      }
+    }
+
+    if (r == Code.INCOMPLETE) {
+      return new InterpreterResult(r, "Incomplete expression");
+    } else {
+      return new InterpreterResult(r, out.toString());
+    }
+  }
+
+
+  @Override
+  public void cancel(InterpreterContext context) {
+    sc.cancelJobGroup(getJobGroup(context));
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    String jobGroup = getJobGroup(context);
+    int completedTasks = 0;
+    int totalTasks = 0;
+
+    DAGScheduler scheduler = sc.dagScheduler();
+    if (scheduler == null) {
+      return 0;
+    }
+    HashSet<ActiveJob> jobs = scheduler.activeJobs();
+    if (jobs == null || jobs.size() == 0) {
+      return 0;
+    }
+    Iterator<ActiveJob> it = jobs.iterator();
+    while (it.hasNext()) {
+      ActiveJob job = it.next();
+      String g = (String) job.properties().get("spark.jobGroup.id");
+
+      if (jobGroup.equals(g)) {
+        int[] progressInfo = null;
+        if (sc.version().startsWith("1.0")) {
+          progressInfo = getProgressFromStage_1_0x(sparkListener, job.finalStage());
+        } else if (sc.version().startsWith("1.1")) {
+          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
+        } else if (sc.version().startsWith("1.2")) {
+          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
+        } else if (sc.version().startsWith("1.3")) {
+          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
+        } else {
+          continue;
+        }
+        totalTasks += progressInfo[0];
+        completedTasks += progressInfo[1];
+      }
+    }
+
+    if (totalTasks == 0) {
+      return 0;
+    }
+    return completedTasks * 100 / totalTasks;
+  }
+
+  private int[] getProgressFromStage_1_0x(JobProgressListener sparkListener, Stage stage) {
+    int numTasks = stage.numTasks();
+    int completedTasks = 0;
+
+    Method method;
+    Object completedTaskInfo = null;
+    try {
+      method = sparkListener.getClass().getMethod("stageIdToTasksComplete");
+      completedTaskInfo =
+          JavaConversions.asJavaMap((HashMap<Object, Object>) method.invoke(sparkListener)).get(
+              stage.id());
+    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
+        | IllegalArgumentException | InvocationTargetException e) {
+      logger.error("Error while getting progress", e);
+    }
+
+    if (completedTaskInfo != null) {
+      completedTasks += (int) completedTaskInfo;
+    }
+    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
+    if (parents != null) {
+      for (Stage s : parents) {
+        int[] p = getProgressFromStage_1_0x(sparkListener, s);
+        numTasks += p[0];
+        completedTasks += p[1];
+      }
+    }
+
+    return new int[] {numTasks, completedTasks};
+  }
+
+  private int[] getProgressFromStage_1_1x(JobProgressListener sparkListener, Stage stage) {
+    int numTasks = stage.numTasks();
+    int completedTasks = 0;
+
+    try {
+      Method stageIdToData = sparkListener.getClass().getMethod("stageIdToData");
+      HashMap<Tuple2<Object, Object>, Object> stageIdData =
+          (HashMap<Tuple2<Object, Object>, Object>) stageIdToData.invoke(sparkListener);
+      Class<?> stageUIDataClass =
+          this.getClass().forName("org.apache.spark.ui.jobs.UIData$StageUIData");
+
+      Method numCompletedTasks = stageUIDataClass.getMethod("numCompleteTasks");
+
+      Set<Tuple2<Object, Object>> keys =
+          JavaConverters.asJavaSetConverter(stageIdData.keySet()).asJava();
+      for (Tuple2<Object, Object> k : keys) {
+        if (stage.id() == (int) k._1()) {
+          Object uiData = stageIdData.get(k).get();
+          completedTasks += (int) numCompletedTasks.invoke(uiData);
+        }
+      }
+    } catch (Exception e) {
+      logger.error("Error on getting progress information", e);
+    }
+
+    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
+    if (parents != null) {
+      for (Stage s : parents) {
+        int[] p = getProgressFromStage_1_1x(sparkListener, s);
+        numTasks += p[0];
+        completedTasks += p[1];
+      }
+    }
+    return new int[] {numTasks, completedTasks};
+  }
+
+  private Code getResultCode(scala.tools.nsc.interpreter.Results.Result r) {
+    if (r instanceof scala.tools.nsc.interpreter.Results.Success$) {
+      return Code.SUCCESS;
+    } else if (r instanceof scala.tools.nsc.interpreter.Results.Incomplete$) {
+      return Code.INCOMPLETE;
+    } else {
+      return Code.ERROR;
+    }
+  }
+
+  @Override
+  public void close() {
+    sc.stop();
+    sc = null;
+
+    intp.close();
+  }
+
+  @Override
+  public FormType getFormType() {
+    return FormType.NATIVE;
+  }
+
+  public JobProgressListener getJobProgressListener() {
+    return sparkListener;
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    return SchedulerFactory.singleton().createOrGetFIFOScheduler(
+      SparkInterpreter.class.getName() + this.hashCode());
+  }
+
+  public ZeppelinContext getZeppelinContext() {
+    return z;
+  }
+}
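
getSystemDefault() above resolves every registered default in a fixed order: environment variable first, then JVM system property, then the hard-coded fallback. Below is a small stand-alone sketch of that lookup order (illustrative only, not part of this patch); the class name SystemDefaultSketch is hypothetical and resolve() mirrors the logic of getSystemDefault().

    public class SystemDefaultSketch {
      static String resolve(String envName, String propertyName, String defaultValue) {
        // 1. environment variable, if a name was given and it is set
        if (envName != null && !envName.isEmpty()) {
          String envValue = System.getenv(envName);
          if (envValue != null) {
            return envValue;
          }
        }
        // 2. JVM system property, if a name was given and it is set
        if (propertyName != null && !propertyName.isEmpty()) {
          String propValue = System.getProperty(propertyName);
          if (propValue != null) {
            return propValue;
          }
        }
        // 3. hard-coded fallback
        return defaultValue;
      }

      public static void main(String[] args) {
        // With neither MASTER nor -Dspark.master set, this falls back to local[*],
        // matching the "master" property registered in the static block above.
        System.out.println(resolve("MASTER", "spark.master", "local[*]"));

        // A system property wins over the fallback but still loses to the env var.
        System.setProperty("spark.master", "spark://masterhost:7077");
        System.out.println(resolve("MASTER", "spark.master", "local[*]"));
      }
    }

Because the environment is checked first, an exported MASTER variable takes precedence over -Dspark.master, which in turn beats the local[*] fallback.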

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
new file mode 100644
index 0000000..2555988
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
@@ -0,0 +1,362 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.spark.SparkContext;
+import org.apache.spark.scheduler.ActiveJob;
+import org.apache.spark.scheduler.DAGScheduler;
+import org.apache.spark.scheduler.Stage;
+import org.apache.spark.sql.SQLContext;
+import org.apache.spark.sql.SQLContext.QueryExecution;
+import org.apache.spark.sql.catalyst.expressions.Attribute;
+import org.apache.spark.ui.jobs.JobProgressListener;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterUtils;
+import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
+import org.apache.zeppelin.interpreter.WrappedInterpreter;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import scala.Tuple2;
+import scala.collection.Iterator;
+import scala.collection.JavaConversions;
+import scala.collection.JavaConverters;
+import scala.collection.mutable.HashMap;
+import scala.collection.mutable.HashSet;
+
+/**
+ * Spark SQL interpreter for Zeppelin.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class SparkSqlInterpreter extends Interpreter {
+  Logger logger = LoggerFactory.getLogger(SparkSqlInterpreter.class);
+  AtomicInteger num = new AtomicInteger(0);
+
+  static {
+    Interpreter.register(
+        "sql",
+        "spark",
+        SparkSqlInterpreter.class.getName(),
+        new InterpreterPropertyBuilder()
+            .add("zeppelin.spark.maxResult", "10000", "Maximum number of SparkSQL result rows to display.")
+            .add("zeppelin.spark.concurrentSQL", "false",
+                "Execute multiple SQL statements concurrently when set to true.")
+            .build());
+  }
+
+  private String getJobGroup(InterpreterContext context){
+    return "zeppelin-" + this.hashCode() + "-" + context.getParagraphId();
+  }
+
+  private int maxResult;
+
+  public SparkSqlInterpreter(Properties property) {
+    super(property);
+  }
+
+  @Override
+  public void open() {
+    this.maxResult = Integer.parseInt(getProperty("zeppelin.spark.maxResult"));
+  }
+
+  private SparkInterpreter getSparkInterpreter() {
+    for (Interpreter intp : getInterpreterGroup()) {
+      if (intp.getClassName().equals(SparkInterpreter.class.getName())) {
+        Interpreter p = intp;
+        while (p instanceof WrappedInterpreter) {
+          if (p instanceof LazyOpenInterpreter) {
+            p.open();
+          }
+          p = ((WrappedInterpreter) p).getInnerInterpreter();
+        }
+        return (SparkInterpreter) p;
+      }
+    }
+    return null;
+  }
+
+  public boolean concurrentSQL() {
+    return Boolean.parseBoolean(getProperty("zeppelin.spark.concurrentSQL"));
+  }
+
+  @Override
+  public void close() {}
+
+  @Override
+  public InterpreterResult interpret(String st, InterpreterContext context) {
+    SQLContext sqlc = getSparkInterpreter().getSQLContext();
+
+    SparkContext sc = sqlc.sparkContext();
+    if (concurrentSQL()) {
+      sc.setLocalProperty("spark.scheduler.pool", "fair");
+    } else {
+      sc.setLocalProperty("spark.scheduler.pool", null);
+    }
+
+    sc.setJobGroup(getJobGroup(context), "Zeppelin", false);
+
+    // SchemaRDD - spark 1.1, 1.2, DataFrame - spark 1.3
+    Object rdd;
+    Object[] rows = null;
+    try {
+      rdd = sqlc.sql(st);
+
+      Method take = rdd.getClass().getMethod("take", int.class);
+      rows = (Object[]) take.invoke(rdd, maxResult + 1);
+    } catch (Exception e) {
+      logger.error("Error", e);
+      sc.clearJobGroup();
+      return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
+    }
+
+    String msg = null;
+
+    // get field names
+    Method queryExecution;
+    QueryExecution qe;
+    try {
+      queryExecution = rdd.getClass().getMethod("queryExecution");
+      qe = (QueryExecution) queryExecution.invoke(rdd);
+    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
+        | IllegalArgumentException | InvocationTargetException e) {
+      throw new InterpreterException(e);
+    }
+
+    List<Attribute> columns =
+        scala.collection.JavaConverters.asJavaListConverter(
+            qe.analyzed().output()).asJava();
+
+    for (Attribute col : columns) {
+      if (msg == null) {
+        msg = col.name();
+      } else {
+        msg += "\t" + col.name();
+      }
+    }
+
+    msg += "\n";
+
+    // ArrayType, BinaryType, BooleanType, ByteType, DecimalType, DoubleType, DynamicType,
+    // FloatType, FractionalType, IntegerType, IntegralType, LongType, MapType, NativeType,
+    // NullType, NumericType, ShortType, StringType, StructType
+
+    try {
+      for (int r = 0; r < maxResult && r < rows.length; r++) {
+        Object row = rows[r];
+        Method isNullAt = row.getClass().getMethod("isNullAt", int.class);
+        Method apply = row.getClass().getMethod("apply", int.class);
+
+        for (int i = 0; i < columns.size(); i++) {
+          if (!(Boolean) isNullAt.invoke(row, i)) {
+            msg += apply.invoke(row, i).toString();
+          } else {
+            msg += "null";
+          }
+          if (i != columns.size() - 1) {
+            msg += "\t";
+          }
+        }
+        msg += "\n";
+      }
+    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
+        | IllegalArgumentException | InvocationTargetException e) {
+      throw new InterpreterException(e);
+    }
+
+    if (rows.length > maxResult) {
+      msg += "\n<font color=red>Results are limited by " + maxResult + ".</font>";
+    }
+    InterpreterResult rett = new InterpreterResult(Code.SUCCESS, "%table " + msg);
+    sc.clearJobGroup();
+    return rett;
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {
+    SQLContext sqlc = getSparkInterpreter().getSQLContext();
+    SparkContext sc = sqlc.sparkContext();
+
+    sc.cancelJobGroup(getJobGroup(context));
+  }
+
+  @Override
+  public FormType getFormType() {
+    return FormType.SIMPLE;
+  }
+
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    String jobGroup = getJobGroup(context);
+    SQLContext sqlc = getSparkInterpreter().getSQLContext();
+    SparkContext sc = sqlc.sparkContext();
+    JobProgressListener sparkListener = getSparkInterpreter().getJobProgressListener();
+    int completedTasks = 0;
+    int totalTasks = 0;
+
+    DAGScheduler scheduler = sc.dagScheduler();
+    HashSet<ActiveJob> jobs = scheduler.activeJobs();
+    Iterator<ActiveJob> it = jobs.iterator();
+    while (it.hasNext()) {
+      ActiveJob job = it.next();
+      String g = (String) job.properties().get("spark.jobGroup.id");
+      if (jobGroup.equals(g)) {
+        int[] progressInfo = null;
+        if (sc.version().startsWith("1.0")) {
+          progressInfo = getProgressFromStage_1_0x(sparkListener, job.finalStage());
+        } else if (sc.version().startsWith("1.1")) {
+          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
+        } else if (sc.version().startsWith("1.2")) {
+          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
+        } else if (sc.version().startsWith("1.3")) {
+          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
+        } else {
+          logger.warn("Spark {} getting progress information not supported" + sc.version());
+          continue;
+        }
+        totalTasks += progressInfo[0];
+        completedTasks += progressInfo[1];
+      }
+    }
+
+    if (totalTasks == 0) {
+      return 0;
+    }
+    return completedTasks * 100 / totalTasks;
+  }
+
+  private int[] getProgressFromStage_1_0x(JobProgressListener sparkListener, Stage stage) {
+    int numTasks = stage.numTasks();
+    int completedTasks = 0;
+
+    Method method;
+    Object completedTaskInfo = null;
+    try {
+      method = sparkListener.getClass().getMethod("stageIdToTasksComplete");
+      completedTaskInfo =
+          JavaConversions.asJavaMap((HashMap<Object, Object>) method.invoke(sparkListener)).get(
+              stage.id());
+    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
+        | IllegalArgumentException | InvocationTargetException e) {
+      logger.error("Error while getting progress", e);
+    }
+
+    if (completedTaskInfo != null) {
+      completedTasks += (int) completedTaskInfo;
+    }
+    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
+    if (parents != null) {
+      for (Stage s : parents) {
+        int[] p = getProgressFromStage_1_0x(sparkListener, s);
+        numTasks += p[0];
+        completedTasks += p[1];
+      }
+    }
+
+    return new int[] {numTasks, completedTasks};
+  }
+
+  private int[] getProgressFromStage_1_1x(JobProgressListener sparkListener, Stage stage) {
+    int numTasks = stage.numTasks();
+    int completedTasks = 0;
+
+    try {
+      Method stageIdToData = sparkListener.getClass().getMethod("stageIdToData");
+      HashMap<Tuple2<Object, Object>, Object> stageIdData =
+          (HashMap<Tuple2<Object, Object>, Object>) stageIdToData.invoke(sparkListener);
+      Class<?> stageUIDataClass = Class.forName("org.apache.spark.ui.jobs.UIData$StageUIData");
+
+      Method numCompletedTasks = stageUIDataClass.getMethod("numCompleteTasks");
+
+      Set<Tuple2<Object, Object>> keys =
+          JavaConverters.asJavaSetConverter(stageIdData.keySet()).asJava();
+      for (Tuple2<Object, Object> k : keys) {
+        if (stage.id() == (int) k._1()) {
+          Object uiData = stageIdData.get(k).get();
+          completedTasks += (int) numCompletedTasks.invoke(uiData);
+        }
+      }
+    } catch (Exception e) {
+      logger.error("Error on getting progress information", e);
+    }
+
+    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
+    if (parents != null) {
+      for (Stage s : parents) {
+        int[] p = getProgressFromStage_1_1x(sparkListener, s);
+        numTasks += p[0];
+        completedTasks += p[1];
+      }
+    }
+    return new int[] {numTasks, completedTasks};
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    if (concurrentSQL()) {
+      int maxConcurrency = 10;
+      return SchedulerFactory.singleton().createOrGetParallelScheduler(
+          SparkSqlInterpreter.class.getName() + this.hashCode(), maxConcurrency);
+    } else {
+      // getSparkInterpreter() calls open() internally, so if SparkInterpreter is not opened
+      // yet this call would block until it is. While blocked, the UI shows 'READY' or
+      // 'FINISHED' instead of 'PENDING' or 'RUNNING', because the scheduler returned by this
+      // method does not exist yet. getSparkInterpreter() would still work here, but it is
+      // safer to look up SparkInterpreter without opening it.
+      for (Interpreter intp : getInterpreterGroup()) {
+        if (intp.getClassName().equals(SparkInterpreter.class.getName())) {
+          return intp.getScheduler();
+        }
+      }
+      throw new InterpreterException("Can't find SparkInterpreter");
+    }
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    return null;
+  }
+}
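
For illustration: interpret() above turns the query result into a tab/newline-delimited string prefixed with "%table". The following standalone sketch reproduces that assembly from plain Java lists, without Spark or reflection; the class and method names are illustrative only and not part of the patch.

    import java.util.Arrays;
    import java.util.List;

    public class TableMessageSketch {
      // Builds a "%table" payload: header row, then one tab-separated line per row,
      // truncated at maxResult just like SparkSqlInterpreter.interpret().
      static String toTable(List<String> columns, List<List<Object>> rows, int maxResult) {
        StringBuilder msg = new StringBuilder();
        for (int i = 0; i < columns.size(); i++) {
          msg.append(columns.get(i));
          msg.append(i == columns.size() - 1 ? "\n" : "\t");
        }
        for (int r = 0; r < maxResult && r < rows.size(); r++) {
          List<Object> row = rows.get(r);
          for (int i = 0; i < columns.size(); i++) {
            Object v = i < row.size() ? row.get(i) : null;
            msg.append(v == null ? "null" : v.toString());
            if (i != columns.size() - 1) {
              msg.append("\t");
            }
          }
          msg.append("\n");
        }
        return "%table " + msg;
      }

      public static void main(String[] args) {
        System.out.println(toTable(
            Arrays.asList("name", "age"),
            Arrays.asList(
                Arrays.<Object>asList("moon", 3),
                Arrays.<Object>asList("sun", null)),
            10000));
      }
    }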

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/ZeppelinContext.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/ZeppelinContext.java b/spark/src/main/java/org/apache/zeppelin/spark/ZeppelinContext.java
new file mode 100644
index 0000000..87cd188
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/ZeppelinContext.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import static scala.collection.JavaConversions.asJavaCollection;
+import static scala.collection.JavaConversions.asJavaIterable;
+import static scala.collection.JavaConversions.collectionAsScalaIterable;
+
+import java.io.PrintStream;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+
+import org.apache.spark.SparkContext;
+import org.apache.spark.sql.SQLContext;
+import org.apache.spark.sql.hive.HiveContext;
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.display.Input.ParamOption;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.spark.dep.DependencyResolver;
+
+import scala.Tuple2;
+import scala.collection.Iterable;
+
+/**
+ * Zeppelin context for the Spark interpreter: helpers for dependency loading and dynamic forms.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class ZeppelinContext extends HashMap<String, Object> {
+  private DependencyResolver dep;
+  private PrintStream out;
+  private InterpreterContext interpreterContext;
+
+  public ZeppelinContext(SparkContext sc, SQLContext sql,
+      InterpreterContext interpreterContext,
+      DependencyResolver dep, PrintStream printStream) {
+    this.sc = sc;
+    this.sqlContext = sql;
+    this.interpreterContext = interpreterContext;
+    this.dep = dep;
+    this.out = printStream;
+  }
+
+  public SparkContext sc;
+  public SQLContext sqlContext;
+  public HiveContext hiveContext;
+  private GUI gui;
+
+  /* spark-1.3
+  public SchemaRDD sql(String sql) {
+    return sqlContext.sql(sql);
+  }
+  */
+
+  /**
+   * Load a dependency for the interpreter and runtime (driver),
+   * and distribute it to the spark cluster (sc.addJar()).
+   *
+   * @param artifact "group:artifact:version" or file path like "/somepath/your.jar"
+   * @return
+   * @throws Exception
+   */
+  public Iterable<String> load(String artifact) throws Exception {
+    return collectionAsScalaIterable(dep.load(artifact, true));
+  }
+
+  /**
+   * Load a dependency and its transitive dependencies for the interpreter and runtime (driver),
+   * and distribute them to the spark cluster (sc.addJar()).
+   *
+   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
+   * @param excludes exclusion list of transitive dependencies, as "groupId:artifactId" strings.
+   * @return
+   * @throws Exception
+   */
+  public Iterable<String> load(String artifact, scala.collection.Iterable<String> excludes)
+      throws Exception {
+    return collectionAsScalaIterable(
+        dep.load(artifact,
+        asJavaCollection(excludes),
+        true));
+  }
+
+  /**
+   * Load a dependency and its transitive dependencies for the interpreter and runtime (driver),
+   * and distribute them to the spark cluster (sc.addJar()).
+   *
+   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
+   * @param excludes exclusion list of transitive dependencies, as "groupId:artifactId" strings.
+   * @return
+   * @throws Exception
+   */
+  public Iterable<String> load(String artifact, Collection<String> excludes) throws Exception {
+    return collectionAsScalaIterable(dep.load(artifact, excludes, true));
+  }
+
+  /**
+   * Load a dependency for the interpreter and runtime (driver) only,
+   * without distributing it to the spark cluster (no sc.addJar()).
+   *
+   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
+   * @return
+   * @throws Exception
+   */
+  public Iterable<String> loadLocal(String artifact) throws Exception {
+    return collectionAsScalaIterable(dep.load(artifact, false));
+  }
+
+
+  /**
+   * Load a dependency and its transitive dependencies for the interpreter and runtime (driver)
+   * only, without distributing them to the spark cluster (no sc.addJar()).
+   *
+   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
+   * @param excludes exclusion list of transitive dependencies, as "groupId:artifactId" strings.
+   * @return
+   * @throws Exception
+   */
+  public Iterable<String> loadLocal(String artifact,
+      scala.collection.Iterable<String> excludes) throws Exception {
+    return collectionAsScalaIterable(dep.load(artifact,
+        asJavaCollection(excludes), false));
+  }
+
+  /**
+   * Load a dependency and its transitive dependencies for the interpreter and runtime (driver)
+   * only, without distributing them to the spark cluster (no sc.addJar()).
+   *
+   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
+   * @param excludes exclusion list of transitive dependencies, as "groupId:artifactId" strings.
+   * @return
+   * @throws Exception
+   */
+  public Iterable<String> loadLocal(String artifact, Collection<String> excludes)
+      throws Exception {
+    return collectionAsScalaIterable(dep.load(artifact, excludes, false));
+  }
+
+
+  /**
+   * Add maven repository
+   *
+   * @param id id of repository, e.g. oss, local, snapshot
+   * @param url url of repository. supported protocols: file, http, https
+   */
+  public void addRepo(String id, String url) {
+    addRepo(id, url, false);
+  }
+
+  /**
+   * Add maven repository
+   *
+   * @param id id of repository
+   * @param url url of repository. supported protocols: file, http, https
+   * @param snapshot true if it is snapshot repository
+   */
+  public void addRepo(String id, String url, boolean snapshot) {
+    dep.addRepo(id, url, snapshot);
+  }
+
+  /**
+   * Remove maven repository by id
+   * @param id id of repository
+   */
+  public void removeRepo(String id){
+    dep.delRepo(id);
+  }
+
+  /**
+   * Get the value of a dynamic form text input with the given name
+   * (rendered in the notebook; the value is supplied by the user).
+   *
+   * @param name name of the form
+   * @return current value of the form, empty string by default
+   */
+
+  public Object input(String name) {
+    return input(name, "");
+  }
+
+  public Object input(String name, Object defaultValue) {
+    return gui.input(name, defaultValue);
+  }
+
+  public Object select(String name, scala.collection.Iterable<Tuple2<Object, String>> options) {
+    return select(name, "", options);
+  }
+
+  public Object select(String name, Object defaultValue,
+      scala.collection.Iterable<Tuple2<Object, String>> options) {
+    int n = options.size();
+    ParamOption[] paramOptions = new ParamOption[n];
+    Iterator<Tuple2<Object, String>> it = asJavaIterable(options).iterator();
+
+    int i = 0;
+    while (it.hasNext()) {
+      Tuple2<Object, String> valueAndDisplayValue = it.next();
+      paramOptions[i++] = new ParamOption(valueAndDisplayValue._1(), valueAndDisplayValue._2());
+    }
+
+    return gui.select(name, "", paramOptions);
+  }
+
+  public void setGui(GUI o) {
+    this.gui = o;
+  }
+
+  public void run(String lines) {
+    /*
+    String intpName = Paragraph.getRequiredReplName(lines);
+    String scriptBody = Paragraph.getScriptBody(lines);
+    Interpreter intp = interpreterContext.getParagraph().getRepl(intpName);
+    InterpreterResult ret = intp.interpret(scriptBody, interpreterContext);
+    if (ret.code() == InterpreterResult.Code.SUCCESS) {
+      out.println("%" + ret.type().toString().toLowerCase() + " " + ret.message());
+    } else if (ret.code() == InterpreterResult.Code.ERROR) {
+      out.println("Error: " + ret.message());
+    } else if (ret.code() == InterpreterResult.Code.INCOMPLETE) {
+      out.println("Incomplete");
+    } else {
+      out.println("Unknown error");
+    }
+    */
+    throw new RuntimeException("Missing implementation");
+  }
+
+  private void restartInterpreter() {
+  }
+
+  public InterpreterContext getInterpreterContext() {
+    return interpreterContext;
+  }
+
+  public void setInterpreterContext(InterpreterContext interpreterContext) {
+    this.interpreterContext = interpreterContext;
+  }
+
+}
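
A usage sketch of the ZeppelinContext API defined above, as it would be called from a notebook paragraph. It assumes the zeppelin-spark classes are on the classpath and that the interpreter has already called setGui(); the repository URL and artifact coordinates are placeholders, not part of the patch.

    import java.util.Arrays;

    import org.apache.zeppelin.spark.ZeppelinContext;

    public class ZeppelinContextUsageSketch {
      // "z" is the ZeppelinContext instance that notebook paragraphs see.
      public static void demo(ZeppelinContext z) throws Exception {
        // Register an extra maven repository (id and URL are placeholders).
        z.addRepo("myrepo", "http://repo.example.com/maven2/", false);
        // Load an artifact and its transitive dependencies, excluding one of them,
        // and distribute the jars to the spark cluster.
        z.load("com.example:mylib:1.0", Arrays.asList("org.slf4j:slf4j-api"));
        // Dynamic form: text input with a default value.
        Object name = z.input("name", "default");
        System.out.println(name);
      }
    }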

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/Booter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/Booter.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/Booter.java
new file mode 100644
index 0000000..0533804
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/Booter.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import java.io.File;
+
+import org.apache.maven.repository.internal.MavenRepositorySystemSession;
+import org.sonatype.aether.RepositorySystem;
+import org.sonatype.aether.RepositorySystemSession;
+import org.sonatype.aether.repository.LocalRepository;
+import org.sonatype.aether.repository.RemoteRepository;
+
+/**
+ * Manage mvn repository.
+ *
+ * @author anthonycorbacho
+ *
+ */
+public class Booter {
+  public static RepositorySystem newRepositorySystem() {
+    return RepositorySystemFactory.newRepositorySystem();
+  }
+
+  public static RepositorySystemSession newRepositorySystemSession(
+      RepositorySystem system, String localRepoPath) {
+    MavenRepositorySystemSession session = new MavenRepositorySystemSession();
+
+    // find homedir
+    String home = System.getenv("ZEPPELIN_HOME");
+    if (home == null) {
+      home = System.getProperty("zeppelin.home");
+    }
+    if (home == null) {
+      home = "..";
+    }
+
+    String path = home + "/" + localRepoPath;
+
+    LocalRepository localRepo =
+        new LocalRepository(new File(path).getAbsolutePath());
+    session.setLocalRepositoryManager(system.newLocalRepositoryManager(localRepo));
+
+    // session.setTransferListener(new ConsoleTransferListener());
+    // session.setRepositoryListener(new ConsoleRepositoryListener());
+
+    // uncomment to generate dirty trees
+    // session.setDependencyGraphTransformer( null );
+
+    return session;
+  }
+
+  public static RemoteRepository newCentralRepository() {
+    return new RemoteRepository("central", "default", "http://repo1.maven.org/maven2/");
+  }
+}
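
newRepositorySystemSession() above resolves the local repository relative to ZEPPELIN_HOME, then the zeppelin.home system property, then "..". A minimal standalone sketch of just that resolution order, runnable without the Aether libraries (the "local-repo" argument is a placeholder):

    import java.io.File;

    public class LocalRepoPathSketch {
      // Mirrors Booter's home-directory lookup: ZEPPELIN_HOME env var first,
      // then -Dzeppelin.home, then ".." as the last resort.
      static String resolveLocalRepo(String localRepoPath) {
        String home = System.getenv("ZEPPELIN_HOME");
        if (home == null) {
          home = System.getProperty("zeppelin.home");
        }
        if (home == null) {
          home = "..";
        }
        return new File(home + "/" + localRepoPath).getAbsolutePath();
      }

      public static void main(String[] args) {
        System.out.println(resolveLocalRepo("local-repo"));
      }
    }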

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/Dependency.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/Dependency.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/Dependency.java
new file mode 100644
index 0000000..ca92893
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/Dependency.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ *
+ */
+public class Dependency {
+  private String groupArtifactVersion;
+  private boolean local = false;
+  private List<String> exclusions;
+
+
+  public Dependency(String groupArtifactVersion) {
+    this.groupArtifactVersion = groupArtifactVersion;
+    exclusions = new LinkedList<String>();
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof Dependency)) {
+      return false;
+    } else {
+      return ((Dependency) o).groupArtifactVersion.equals(groupArtifactVersion);
+    }
+  }
+
+  /**
+   * Mark this dependency local-only, so the artifact is not added to SparkContext (sc.addJar()).
+   * @return
+   */
+  public Dependency local() {
+    local = true;
+    return this;
+  }
+
+  public Dependency excludeAll() {
+    exclude("*");
+    return this;
+  }
+
+  /**
+   *
+   * @param exclusions comma- or newline-separated list of "groupId:artifactId" patterns
+   * @return
+   */
+  public Dependency exclude(String exclusions) {
+    for (String item : exclusions.split(",|\n")) {
+      this.exclusions.add(item);
+    }
+
+    return this;
+  }
+
+
+  public String getGroupArtifactVersion() {
+    return groupArtifactVersion;
+  }
+
+  public boolean isDist() {
+    return !local;
+  }
+
+  public List<String> getExclusions() {
+    return exclusions;
+  }
+
+  public boolean isLocalFsArtifact() {
+    int numSplits = groupArtifactVersion.split(":").length;
+    return !(numSplits >= 3 && numSplits <= 6);
+  }
+}
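
isLocalFsArtifact() above distinguishes Maven coordinates from local file paths purely by counting colon-separated parts. A minimal standalone sketch of that heuristic (class name and example values are illustrative only):

    public class ArtifactKindSketch {
      // 3 to 6 colon-separated parts => treated as Maven coordinates
      // (<groupId>:<artifactId>[:<extension>[:<classifier>]]:<version>);
      // anything else => treated as a local filesystem path.
      static boolean isLocalFsArtifact(String groupArtifactVersion) {
        int numSplits = groupArtifactVersion.split(":").length;
        return !(numSplits >= 3 && numSplits <= 6);
      }

      public static void main(String[] args) {
        System.out.println(isLocalFsArtifact("com.example:mylib:1.0"));  // false -> Maven GAV
        System.out.println(isLocalFsArtifact("/tmp/your.jar"));          // true  -> local file
      }
    }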

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/DependencyContext.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/DependencyContext.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/DependencyContext.java
new file mode 100644
index 0000000..f0fd313
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/DependencyContext.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.sonatype.aether.RepositorySystem;
+import org.sonatype.aether.RepositorySystemSession;
+import org.sonatype.aether.artifact.Artifact;
+import org.sonatype.aether.collection.CollectRequest;
+import org.sonatype.aether.graph.DependencyFilter;
+import org.sonatype.aether.repository.RemoteRepository;
+import org.sonatype.aether.resolution.ArtifactResolutionException;
+import org.sonatype.aether.resolution.ArtifactResult;
+import org.sonatype.aether.resolution.DependencyRequest;
+import org.sonatype.aether.resolution.DependencyResolutionException;
+import org.sonatype.aether.util.artifact.DefaultArtifact;
+import org.sonatype.aether.util.artifact.JavaScopes;
+import org.sonatype.aether.util.filter.DependencyFilterUtils;
+import org.sonatype.aether.util.filter.PatternExclusionsDependencyFilter;
+
+
+/**
+ *
+ */
+public class DependencyContext {
+  List<Dependency> dependencies = new LinkedList<Dependency>();
+  List<Repository> repositories = new LinkedList<Repository>();
+
+  List<File> files = new LinkedList<File>();
+  List<File> filesDist = new LinkedList<File>();
+  private RepositorySystem system = Booter.newRepositorySystem();
+  private RepositorySystemSession session;
+  private RemoteRepository mavenCentral = new RemoteRepository("central",
+      "default", "http://repo1.maven.org/maven2/");
+  private RemoteRepository mavenLocal = new RemoteRepository("local",
+      "default", "file://" + System.getProperty("user.home") + "/.m2/repository");
+
+  public DependencyContext(String localRepoPath) {
+    session =  Booter.newRepositorySystemSession(system, localRepoPath);
+  }
+
+  public Dependency load(String lib) {
+    Dependency dep = new Dependency(lib);
+
+    if (dependencies.contains(dep)) {
+      dependencies.remove(dep);
+    }
+    dependencies.add(dep);
+    return dep;
+  }
+
+  public Repository addRepo(String name) {
+    Repository rep = new Repository(name);
+    repositories.add(rep);
+    return rep;
+  }
+
+  public void reset() {
+    dependencies = new LinkedList<Dependency>();
+    repositories = new LinkedList<Repository>();
+
+    files = new LinkedList<File>();
+    filesDist = new LinkedList<File>();
+  }
+
+
+  /**
+   * fetch all artifacts
+   * @return
+   * @throws MalformedURLException
+   * @throws ArtifactResolutionException
+   * @throws DependencyResolutionException
+   */
+  public List<File> fetch() throws MalformedURLException,
+      DependencyResolutionException, ArtifactResolutionException {
+
+    for (Dependency dep : dependencies) {
+      if (!dep.isLocalFsArtifact()) {
+        List<ArtifactResult> artifacts = fetchArtifactWithDep(dep);
+        for (ArtifactResult artifact : artifacts) {
+          if (dep.isDist()) {
+            filesDist.add(artifact.getArtifact().getFile());
+          }
+          files.add(artifact.getArtifact().getFile());
+        }
+      } else {
+        if (dep.isDist()) {
+          filesDist.add(new File(dep.getGroupArtifactVersion()));
+        }
+        files.add(new File(dep.getGroupArtifactVersion()));
+      }
+    }
+
+    return files;
+  }
+
+  private List<ArtifactResult> fetchArtifactWithDep(Dependency dep)
+      throws DependencyResolutionException, ArtifactResolutionException {
+    Artifact artifact = new DefaultArtifact(
+        DependencyResolver.inferScalaVersion(dep.getGroupArtifactVersion()));
+
+    DependencyFilter classpathFlter = DependencyFilterUtils
+        .classpathFilter(JavaScopes.COMPILE);
+    PatternExclusionsDependencyFilter exclusionFilter = new PatternExclusionsDependencyFilter(
+        DependencyResolver.inferScalaVersion(dep.getExclusions()));
+
+    CollectRequest collectRequest = new CollectRequest();
+    collectRequest.setRoot(new org.sonatype.aether.graph.Dependency(artifact,
+        JavaScopes.COMPILE));
+
+    collectRequest.addRepository(mavenCentral);
+    collectRequest.addRepository(mavenLocal);
+    for (Repository repo : repositories) {
+      RemoteRepository rr = new RemoteRepository(repo.getName(), "default", repo.getUrl());
+      rr.setPolicy(repo.isSnapshot(), null);
+      collectRequest.addRepository(rr);
+    }
+
+    DependencyRequest dependencyRequest = new DependencyRequest(collectRequest,
+        DependencyFilterUtils.andFilter(exclusionFilter, classpathFlter));
+
+    return system.resolveDependencies(session, dependencyRequest).getArtifactResults();
+  }
+
+  public List<File> getFiles() {
+    return files;
+  }
+
+  public List<File> getFilesDist() {
+    return filesDist;
+  }
+}
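
A usage sketch of DependencyContext's fluent API, assuming zeppelin-spark and the Sonatype Aether libraries are on the classpath. The repository URL, artifact coordinates, and "local-repo" path are placeholders, not part of the patch.

    import java.io.File;
    import java.util.List;

    import org.apache.zeppelin.spark.dep.DependencyContext;

    public class DependencyContextUsageSketch {
      public static void main(String[] args) throws Exception {
        DependencyContext ctx = new DependencyContext("local-repo");
        // Extra snapshot repository in addition to maven central and ~/.m2.
        ctx.addRepo("myrepo").url("http://repo.example.com/maven2/").snapshot();
        // Maven artifact, resolved with all transitive dependencies excluded.
        ctx.load("com.example:mylib:1.0").excludeAll();
        // Plain jar path, used as-is and kept off sc.addJar().
        ctx.load("/tmp/your.jar").local();
        List<File> jars = ctx.fetch();
        System.out.println(jars);
      }
    }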

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/DependencyResolver.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/DependencyResolver.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/DependencyResolver.java
new file mode 100644
index 0000000..06a4022
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/DependencyResolver.java
@@ -0,0 +1,350 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import java.io.File;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.spark.SparkContext;
+import org.apache.spark.repl.SparkIMain;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.sonatype.aether.RepositorySystem;
+import org.sonatype.aether.RepositorySystemSession;
+import org.sonatype.aether.artifact.Artifact;
+import org.sonatype.aether.collection.CollectRequest;
+import org.sonatype.aether.graph.Dependency;
+import org.sonatype.aether.graph.DependencyFilter;
+import org.sonatype.aether.repository.RemoteRepository;
+import org.sonatype.aether.resolution.ArtifactResult;
+import org.sonatype.aether.resolution.DependencyRequest;
+import org.sonatype.aether.util.artifact.DefaultArtifact;
+import org.sonatype.aether.util.artifact.JavaScopes;
+import org.sonatype.aether.util.filter.DependencyFilterUtils;
+import org.sonatype.aether.util.filter.PatternExclusionsDependencyFilter;
+
+import scala.Some;
+import scala.collection.IndexedSeq;
+import scala.reflect.io.AbstractFile;
+import scala.tools.nsc.Global;
+import scala.tools.nsc.backend.JavaPlatform;
+import scala.tools.nsc.util.ClassPath;
+import scala.tools.nsc.util.MergedClassPath;
+
+/**
+ * Deps resolver.
+ * Add new dependencies from mvn repo (at runtime) to Zeppelin.
+ *
+ * @author anthonycorbacho
+ *
+ */
+public class DependencyResolver {
+  Logger logger = LoggerFactory.getLogger(DependencyResolver.class);
+  private Global global;
+  private SparkIMain intp;
+  private SparkContext sc;
+  private RepositorySystem system = Booter.newRepositorySystem();
+  private List<RemoteRepository> repos = new LinkedList<RemoteRepository>();
+  private RepositorySystemSession session;
+  private DependencyFilter classpathFlter = DependencyFilterUtils.classpathFilter(
+                                                                                JavaScopes.COMPILE,
+                                                                                JavaScopes.PROVIDED,
+                                                                                JavaScopes.RUNTIME,
+                                                                                JavaScopes.SYSTEM);
+
+  private final String[] exclusions = new String[] {"org.scala-lang:scala-library",
+                                                    "org.scala-lang:scala-compiler",
+                                                    "org.scala-lang:scala-reflect",
+                                                    "org.scala-lang:scalap",
+                                                    "org.apache.zeppelin:zeppelin-zengine",
+                                                    "org.apache.zeppelin:zeppelin-spark",
+                                                    "org.apache.zeppelin:zeppelin-server"};
+
+  public DependencyResolver(SparkIMain intp, SparkContext sc, String localRepoPath) {
+    this.intp = intp;
+    this.global = intp.global();
+    this.sc = sc;
+    session = Booter.newRepositorySystemSession(system, localRepoPath);
+    repos.add(Booter.newCentralRepository()); // add maven central
+    repos.add(new RemoteRepository("local", "default", "file://"
+        + System.getProperty("user.home") + "/.m2/repository"));
+  }
+
+  public void addRepo(String id, String url, boolean snapshot) {
+    synchronized (repos) {
+      delRepo(id);
+      RemoteRepository rr = new RemoteRepository(id, "default", url);
+      rr.setPolicy(snapshot, null);
+      repos.add(rr);
+    }
+  }
+
+  public RemoteRepository delRepo(String id) {
+    synchronized (repos) {
+      Iterator<RemoteRepository> it = repos.iterator();
+      while (it.hasNext()) {
+        RemoteRepository repo = it.next();
+        if (repo.getId().equals(id)) {
+          it.remove();
+          return repo;
+        }
+      }
+    }
+    return null;
+  }
+
+  private void updateCompilerClassPath(URL[] urls) throws IllegalAccessException,
+      IllegalArgumentException, InvocationTargetException {
+
+    JavaPlatform platform = (JavaPlatform) global.platform();
+    MergedClassPath<AbstractFile> newClassPath = mergeUrlsIntoClassPath(platform, urls);
+
+    Method[] methods = platform.getClass().getMethods();
+    for (Method m : methods) {
+      if (m.getName().endsWith("currentClassPath_$eq")) {
+        m.invoke(platform, new Some(newClassPath));
+        break;
+      }
+    }
+
+    // NOTE: Must use reflection until this is exposed/fixed upstream in Scala
+    List<String> classPaths = new LinkedList<String>();
+    for (URL url : urls) {
+      classPaths.add(url.getPath());
+    }
+
+    // Reload all jars specified into our compiler
+    global.invalidateClassPathEntries(scala.collection.JavaConversions.asScalaBuffer(classPaths)
+        .toList());
+  }
+
+  // Until spark 1.1.x
+  // check https://github.com/apache/spark/commit/191d7cf2a655d032f160b9fa181730364681d0e7
+  private void updateRuntimeClassPath(URL[] urls) throws SecurityException, IllegalAccessException,
+      IllegalArgumentException, InvocationTargetException, NoSuchMethodException {
+    ClassLoader cl = intp.classLoader().getParent();
+    Method addURL;
+    addURL = cl.getClass().getDeclaredMethod("addURL", new Class[] {URL.class});
+    addURL.setAccessible(true);
+    for (URL url : urls) {
+      addURL.invoke(cl, url);
+    }
+  }
+
+  private MergedClassPath<AbstractFile> mergeUrlsIntoClassPath(JavaPlatform platform, URL[] urls) {
+    IndexedSeq<ClassPath<AbstractFile>> entries =
+        ((MergedClassPath<AbstractFile>) platform.classPath()).entries();
+    List<ClassPath<AbstractFile>> cp = new LinkedList<ClassPath<AbstractFile>>();
+
+    for (int i = 0; i < entries.size(); i++) {
+      cp.add(entries.apply(i));
+    }
+
+    for (URL url : urls) {
+      AbstractFile file;
+      if ("file".equals(url.getProtocol())) {
+        File f = new File(url.getPath());
+        if (f.isDirectory()) {
+          file = AbstractFile.getDirectory(scala.reflect.io.File.jfile2path(f));
+        } else {
+          file = AbstractFile.getFile(scala.reflect.io.File.jfile2path(f));
+        }
+      } else {
+        file = AbstractFile.getURL(url);
+      }
+
+      ClassPath<AbstractFile> newcp = platform.classPath().context().newClassPath(file);
+
+      // distinct
+      if (!cp.contains(newcp)) {
+        cp.add(newcp);
+      }
+    }
+
+    return new MergedClassPath(scala.collection.JavaConversions.asScalaBuffer(cp).toIndexedSeq(),
+        platform.classPath().context());
+  }
+
+  public List<String> load(String artifact,
+      boolean addSparkContext) throws Exception {
+    return load(artifact, new LinkedList<String>(), addSparkContext);
+  }
+
+  public List<String> load(String artifact, Collection<String> excludes,
+      boolean addSparkContext) throws Exception {
+    if (StringUtils.isBlank(artifact)) {
+      throw new RuntimeException("Invalid artifact to load");
+    }
+
+    // <groupId>:<artifactId>[:<extension>[:<classifier>]]:<version>
+    int numSplits = artifact.split(":").length;
+    if (numSplits >= 3 && numSplits <= 6) {
+      return loadFromMvn(artifact, excludes, addSparkContext);
+    } else {
+      loadFromFs(artifact, addSparkContext);
+      LinkedList<String> libs = new LinkedList<String>();
+      libs.add(artifact);
+      return libs;
+    }
+  }
+
+  private void loadFromFs(String artifact, boolean addSparkContext) throws Exception {
+    File jarFile = new File(artifact);
+
+    intp.global().new Run();
+
+    updateRuntimeClassPath(new URL[] {jarFile.toURI().toURL()});
+    updateCompilerClassPath(new URL[] {jarFile.toURI().toURL()});
+
+    if (addSparkContext) {
+      sc.addJar(jarFile.getAbsolutePath());
+    }
+  }
+
+  private List<String> loadFromMvn(String artifact, Collection<String> excludes,
+      boolean addSparkContext) throws Exception {
+    List<String> loadedLibs = new LinkedList<String>();
+    Collection<String> allExclusions = new LinkedList<String>();
+    allExclusions.addAll(excludes);
+    allExclusions.addAll(Arrays.asList(exclusions));
+
+    List<ArtifactResult> listOfArtifact;
+    listOfArtifact = getArtifactsWithDep(artifact, allExclusions);
+
+    Iterator<ArtifactResult> it = listOfArtifact.iterator();
+    while (it.hasNext()) {
+      Artifact a = it.next().getArtifact();
+      String gav = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion();
+      for (String exclude : allExclusions) {
+        if (gav.startsWith(exclude)) {
+          it.remove();
+          break;
+        }
+      }
+    }
+
+    List<URL> newClassPathList = new LinkedList<URL>();
+    List<File> files = new LinkedList<File>();
+    for (ArtifactResult artifactResult : listOfArtifact) {
+      logger.info("Load " + artifactResult.getArtifact().getGroupId() + ":"
+          + artifactResult.getArtifact().getArtifactId() + ":"
+          + artifactResult.getArtifact().getVersion());
+      newClassPathList.add(artifactResult.getArtifact().getFile().toURI().toURL());
+      files.add(artifactResult.getArtifact().getFile());
+      loadedLibs.add(artifactResult.getArtifact().getGroupId() + ":"
+          + artifactResult.getArtifact().getArtifactId() + ":"
+          + artifactResult.getArtifact().getVersion());
+    }
+
+    intp.global().new Run();
+    updateRuntimeClassPath(newClassPathList.toArray(new URL[0]));
+    updateCompilerClassPath(newClassPathList.toArray(new URL[0]));
+
+    if (addSparkContext) {
+      for (File f : files) {
+        sc.addJar(f.getAbsolutePath());
+      }
+    }
+
+    return loadedLibs;
+  }
+
+  /**
+   * Resolve an artifact and its transitive dependencies from the configured repositories.
+   *
+   * @param dependency "groupId:artifactId:version" coordinates to resolve
+   * @param excludes list of exclusion patterns of the form groupId:artifactId
+   * @return
+   * @throws Exception
+   */
+  public List<ArtifactResult> getArtifactsWithDep(String dependency,
+      Collection<String> excludes) throws Exception {
+    Artifact artifact = new DefaultArtifact(inferScalaVersion(dependency));
+    DependencyFilter classpathFlter = DependencyFilterUtils.classpathFilter(JavaScopes.COMPILE);
+    PatternExclusionsDependencyFilter exclusionFilter =
+        new PatternExclusionsDependencyFilter(inferScalaVersion(excludes));
+
+    CollectRequest collectRequest = new CollectRequest();
+    collectRequest.setRoot(new Dependency(artifact, JavaScopes.COMPILE));
+
+    synchronized (repos) {
+      for (RemoteRepository repo : repos) {
+        collectRequest.addRepository(repo);
+      }
+    }
+    DependencyRequest dependencyRequest = new DependencyRequest(collectRequest,
+        DependencyFilterUtils.andFilter(exclusionFilter, classpathFlter));
+    return system.resolveDependencies(session, dependencyRequest).getArtifactResults();
+  }
+
+  public static Collection<String> inferScalaVersion(Collection<String> artifact) {
+    List<String> list = new LinkedList<String>();
+    for (String a : artifact) {
+      list.add(inferScalaVersion(a));
+    }
+    return list;
+  }
+
+  public static String inferScalaVersion(String artifact) {
+    int pos = artifact.indexOf(":");
+    if (pos < 0 || pos + 2 >= artifact.length()) {
+      // failed to infer
+      return artifact;
+    }
+
+    if (':' == artifact.charAt(pos + 1)) {
+      String restOfthem = "";
+      String versionSep = ":";
+
+      String groupId = artifact.substring(0, pos);
+      int nextPos = artifact.indexOf(":", pos + 2);
+      if (nextPos < 0) {
+        if (artifact.charAt(artifact.length() - 1) == '*') {
+          nextPos = artifact.length() - 1;
+          versionSep = "";
+          restOfthem = "*";
+        } else {
+          versionSep = "";
+          nextPos = artifact.length();
+        }
+      }
+
+      String artifactId = artifact.substring(pos + 2, nextPos);
+      if (nextPos < artifact.length()) {
+        if (!restOfthem.equals("*")) {
+          restOfthem = artifact.substring(nextPos + 1);
+        }
+      }
+
+      String [] version = scala.util.Properties.versionNumberString().split("[.]");
+      String scalaVersion = version[0] + "." + version[1];
+
+      return groupId + ":" + artifactId + "_" + scalaVersion + versionSep + restOfthem;
+    } else {
+      return artifact;
+    }
+  }
+}
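
inferScalaVersion() above supports a "group::artifact:version" shorthand, appending the Scala binary version to the artifactId much like sbt's %% operator. A simplified standalone sketch of that rewrite; the Scala version is passed in rather than read from scala.util.Properties, and the trailing "*" wildcard handling in the patch is omitted.

    public class InferScalaVersionSketch {
      static String infer(String artifact, String scalaBinaryVersion) {
        int pos = artifact.indexOf(":");
        if (pos < 0 || pos + 2 >= artifact.length() || ':' != artifact.charAt(pos + 1)) {
          return artifact;  // plain groupId:artifactId:version is left untouched
        }
        String groupId = artifact.substring(0, pos);
        int nextPos = artifact.indexOf(":", pos + 2);
        String artifactId = nextPos < 0 ? artifact.substring(pos + 2)
                                        : artifact.substring(pos + 2, nextPos);
        String rest = nextPos < 0 ? "" : artifact.substring(nextPos);  // ":<version>" etc.
        return groupId + ":" + artifactId + "_" + scalaBinaryVersion + rest;
      }

      public static void main(String[] args) {
        // "org.apache.spark::spark-streaming-twitter:1.2.0"
        //   -> "org.apache.spark:spark-streaming-twitter_2.10:1.2.0"
        System.out.println(infer("org.apache.spark::spark-streaming-twitter:1.2.0", "2.10"));
        System.out.println(infer("com.example:plainlib:1.0", "2.10"));  // unchanged
      }
    }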

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/Repository.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/Repository.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/Repository.java
new file mode 100644
index 0000000..49c6c9b
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/Repository.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+/**
+ *
+ *
+ */
+public class Repository {
+  private boolean snapshot = false;
+  private String name;
+  private String url;
+
+  public Repository(String name){
+    this.name = name;
+  }
+
+  public Repository url(String url) {
+    this.url = url;
+    return this;
+  }
+
+  public Repository snapshot() {
+    snapshot = true;
+    return this;
+  }
+
+  public boolean isSnapshot() {
+    return snapshot;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public String getUrl() {
+    return url;
+  }
+}


[08/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterService.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterService.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterService.java
new file mode 100644
index 0000000..a64395f
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteInterpreterService.java
@@ -0,0 +1,8174 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.zeppelin.interpreter.thrift;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class RemoteInterpreterService {
+
+  public interface Iface {
+
+    public void createInterpreter(String className, Map<String,String> properties) throws org.apache.thrift.TException;
+
+    public void open(String className) throws org.apache.thrift.TException;
+
+    public void close(String className) throws org.apache.thrift.TException;
+
+    public RemoteInterpreterResult interpret(String className, String st, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException;
+
+    public void cancel(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException;
+
+    public int getProgress(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException;
+
+    public String getFormType(String className) throws org.apache.thrift.TException;
+
+    public List<String> completion(String className, String buf, int cursor) throws org.apache.thrift.TException;
+
+    public void shutdown() throws org.apache.thrift.TException;
+
+    public String getStatus(String jobId) throws org.apache.thrift.TException;
+
+  }
+
+  public interface AsyncIface {
+
+    public void createInterpreter(String className, Map<String,String> properties, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.createInterpreter_call> resultHandler) throws org.apache.thrift.TException;
+
+    public void open(String className, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.open_call> resultHandler) throws org.apache.thrift.TException;
+
+    public void close(String className, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.close_call> resultHandler) throws org.apache.thrift.TException;
+
+    public void interpret(String className, String st, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.interpret_call> resultHandler) throws org.apache.thrift.TException;
+
+    public void cancel(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.cancel_call> resultHandler) throws org.apache.thrift.TException;
+
+    public void getProgress(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getProgress_call> resultHandler) throws org.apache.thrift.TException;
+
+    public void getFormType(String className, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getFormType_call> resultHandler) throws org.apache.thrift.TException;
+
+    public void completion(String className, String buf, int cursor, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.completion_call> resultHandler) throws org.apache.thrift.TException;
+
+    public void shutdown(org.apache.thrift.async.AsyncMethodCallback<AsyncClient.shutdown_call> resultHandler) throws org.apache.thrift.TException;
+
+    public void getStatus(String jobId, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getStatus_call> resultHandler) throws org.apache.thrift.TException;
+
+  }
+
+  public static class Client extends org.apache.thrift.TServiceClient implements Iface {
+    public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
+      public Factory() {}
+      public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
+        return new Client(prot);
+      }
+      public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
+        return new Client(iprot, oprot);
+      }
+    }
+
+    public Client(org.apache.thrift.protocol.TProtocol prot)
+    {
+      super(prot, prot);
+    }
+
+    public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
+      super(iprot, oprot);
+    }
+
+    public void createInterpreter(String className, Map<String,String> properties) throws org.apache.thrift.TException
+    {
+      send_createInterpreter(className, properties);
+      recv_createInterpreter();
+    }
+
+    public void send_createInterpreter(String className, Map<String,String> properties) throws org.apache.thrift.TException
+    {
+      createInterpreter_args args = new createInterpreter_args();
+      args.setClassName(className);
+      args.setProperties(properties);
+      sendBase("createInterpreter", args);
+    }
+
+    public void recv_createInterpreter() throws org.apache.thrift.TException
+    {
+      createInterpreter_result result = new createInterpreter_result();
+      receiveBase(result, "createInterpreter");
+      return;
+    }
+
+    public void open(String className) throws org.apache.thrift.TException
+    {
+      send_open(className);
+      recv_open();
+    }
+
+    public void send_open(String className) throws org.apache.thrift.TException
+    {
+      open_args args = new open_args();
+      args.setClassName(className);
+      sendBase("open", args);
+    }
+
+    public void recv_open() throws org.apache.thrift.TException
+    {
+      open_result result = new open_result();
+      receiveBase(result, "open");
+      return;
+    }
+
+    public void close(String className) throws org.apache.thrift.TException
+    {
+      send_close(className);
+      recv_close();
+    }
+
+    public void send_close(String className) throws org.apache.thrift.TException
+    {
+      close_args args = new close_args();
+      args.setClassName(className);
+      sendBase("close", args);
+    }
+
+    public void recv_close() throws org.apache.thrift.TException
+    {
+      close_result result = new close_result();
+      receiveBase(result, "close");
+      return;
+    }
+
+    public RemoteInterpreterResult interpret(String className, String st, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
+    {
+      send_interpret(className, st, interpreterContext);
+      return recv_interpret();
+    }
+
+    public void send_interpret(String className, String st, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
+    {
+      interpret_args args = new interpret_args();
+      args.setClassName(className);
+      args.setSt(st);
+      args.setInterpreterContext(interpreterContext);
+      sendBase("interpret", args);
+    }
+
+    public RemoteInterpreterResult recv_interpret() throws org.apache.thrift.TException
+    {
+      interpret_result result = new interpret_result();
+      receiveBase(result, "interpret");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "interpret failed: unknown result");
+    }
+
+    public void cancel(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
+    {
+      send_cancel(className, interpreterContext);
+      recv_cancel();
+    }
+
+    public void send_cancel(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
+    {
+      cancel_args args = new cancel_args();
+      args.setClassName(className);
+      args.setInterpreterContext(interpreterContext);
+      sendBase("cancel", args);
+    }
+
+    public void recv_cancel() throws org.apache.thrift.TException
+    {
+      cancel_result result = new cancel_result();
+      receiveBase(result, "cancel");
+      return;
+    }
+
+    public int getProgress(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
+    {
+      send_getProgress(className, interpreterContext);
+      return recv_getProgress();
+    }
+
+    public void send_getProgress(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
+    {
+      getProgress_args args = new getProgress_args();
+      args.setClassName(className);
+      args.setInterpreterContext(interpreterContext);
+      sendBase("getProgress", args);
+    }
+
+    public int recv_getProgress() throws org.apache.thrift.TException
+    {
+      getProgress_result result = new getProgress_result();
+      receiveBase(result, "getProgress");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getProgress failed: unknown result");
+    }
+
+    public String getFormType(String className) throws org.apache.thrift.TException
+    {
+      send_getFormType(className);
+      return recv_getFormType();
+    }
+
+    public void send_getFormType(String className) throws org.apache.thrift.TException
+    {
+      getFormType_args args = new getFormType_args();
+      args.setClassName(className);
+      sendBase("getFormType", args);
+    }
+
+    public String recv_getFormType() throws org.apache.thrift.TException
+    {
+      getFormType_result result = new getFormType_result();
+      receiveBase(result, "getFormType");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getFormType failed: unknown result");
+    }
+
+    public List<String> completion(String className, String buf, int cursor) throws org.apache.thrift.TException
+    {
+      send_completion(className, buf, cursor);
+      return recv_completion();
+    }
+
+    public void send_completion(String className, String buf, int cursor) throws org.apache.thrift.TException
+    {
+      completion_args args = new completion_args();
+      args.setClassName(className);
+      args.setBuf(buf);
+      args.setCursor(cursor);
+      sendBase("completion", args);
+    }
+
+    public List<String> recv_completion() throws org.apache.thrift.TException
+    {
+      completion_result result = new completion_result();
+      receiveBase(result, "completion");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "completion failed: unknown result");
+    }
+
+    public void shutdown() throws org.apache.thrift.TException
+    {
+      send_shutdown();
+      recv_shutdown();
+    }
+
+    public void send_shutdown() throws org.apache.thrift.TException
+    {
+      shutdown_args args = new shutdown_args();
+      sendBase("shutdown", args);
+    }
+
+    public void recv_shutdown() throws org.apache.thrift.TException
+    {
+      shutdown_result result = new shutdown_result();
+      receiveBase(result, "shutdown");
+      return;
+    }
+
+    public String getStatus(String jobId) throws org.apache.thrift.TException
+    {
+      send_getStatus(jobId);
+      return recv_getStatus();
+    }
+
+    public void send_getStatus(String jobId) throws org.apache.thrift.TException
+    {
+      getStatus_args args = new getStatus_args();
+      args.setJobId(jobId);
+      sendBase("getStatus", args);
+    }
+
+    public String recv_getStatus() throws org.apache.thrift.TException
+    {
+      getStatus_result result = new getStatus_result();
+      receiveBase(result, "getStatus");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getStatus failed: unknown result");
+    }
+
+  }
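+
+  // Illustrative usage sketch (comments only, not part of the generated API):
+  // the synchronous Client above is typically driven over a socket transport
+  // with the binary protocol; the endpoint and protocol choice shown here are
+  // assumptions and must match how the interpreter server is actually started.
+  //
+  //   org.apache.thrift.transport.TTransport transport =
+  //       new org.apache.thrift.transport.TSocket("localhost", 9090); // hypothetical endpoint
+  //   transport.open();
+  //   Client client = new Client(new org.apache.thrift.protocol.TBinaryProtocol(transport));
+  //   String status = client.getStatus("someJobId"); // hypothetical job id
+  //   transport.close();
+
+  // The asynchronous client below mirrors the same interface: each call wraps its
+  // arguments in a *_call object, hands it to the shared TAsyncClientManager, and
+  // getResult() decodes the buffered response frame by replaying it through a
+  // synchronous Client.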
+  public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
+    public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
+      private org.apache.thrift.async.TAsyncClientManager clientManager;
+      private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
+      public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
+        this.clientManager = clientManager;
+        this.protocolFactory = protocolFactory;
+      }
+      public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
+        return new AsyncClient(protocolFactory, clientManager, transport);
+      }
+    }
+
+    public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
+      super(protocolFactory, clientManager, transport);
+    }
+
+    public void createInterpreter(String className, Map<String,String> properties, org.apache.thrift.async.AsyncMethodCallback<createInterpreter_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      createInterpreter_call method_call = new createInterpreter_call(className, properties, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class createInterpreter_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String className;
+      private Map<String,String> properties;
+      public createInterpreter_call(String className, Map<String,String> properties, org.apache.thrift.async.AsyncMethodCallback<createInterpreter_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.className = className;
+        this.properties = properties;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("createInterpreter", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        createInterpreter_args args = new createInterpreter_args();
+        args.setClassName(className);
+        args.setProperties(properties);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_createInterpreter();
+      }
+    }
+
+    public void open(String className, org.apache.thrift.async.AsyncMethodCallback<open_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      open_call method_call = new open_call(className, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class open_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String className;
+      public open_call(String className, org.apache.thrift.async.AsyncMethodCallback<open_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.className = className;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("open", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        open_args args = new open_args();
+        args.setClassName(className);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_open();
+      }
+    }
+
+    public void close(String className, org.apache.thrift.async.AsyncMethodCallback<close_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      close_call method_call = new close_call(className, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class close_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String className;
+      public close_call(String className, org.apache.thrift.async.AsyncMethodCallback<close_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.className = className;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("close", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        close_args args = new close_args();
+        args.setClassName(className);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_close();
+      }
+    }
+
+    public void interpret(String className, String st, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<interpret_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      interpret_call method_call = new interpret_call(className, st, interpreterContext, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class interpret_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String className;
+      private String st;
+      private RemoteInterpreterContext interpreterContext;
+      public interpret_call(String className, String st, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<interpret_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.className = className;
+        this.st = st;
+        this.interpreterContext = interpreterContext;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("interpret", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        interpret_args args = new interpret_args();
+        args.setClassName(className);
+        args.setSt(st);
+        args.setInterpreterContext(interpreterContext);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public RemoteInterpreterResult getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        return (new Client(prot)).recv_interpret();
+      }
+    }
+
+    public void cancel(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<cancel_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      cancel_call method_call = new cancel_call(className, interpreterContext, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class cancel_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String className;
+      private RemoteInterpreterContext interpreterContext;
+      public cancel_call(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<cancel_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.className = className;
+        this.interpreterContext = interpreterContext;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("cancel", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        cancel_args args = new cancel_args();
+        args.setClassName(className);
+        args.setInterpreterContext(interpreterContext);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_cancel();
+      }
+    }
+
+    public void getProgress(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<getProgress_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      getProgress_call method_call = new getProgress_call(className, interpreterContext, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class getProgress_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String className;
+      private RemoteInterpreterContext interpreterContext;
+      public getProgress_call(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<getProgress_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.className = className;
+        this.interpreterContext = interpreterContext;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getProgress", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        getProgress_args args = new getProgress_args();
+        args.setClassName(className);
+        args.setInterpreterContext(interpreterContext);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public int getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        return (new Client(prot)).recv_getProgress();
+      }
+    }
+
+    public void getFormType(String className, org.apache.thrift.async.AsyncMethodCallback<getFormType_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      getFormType_call method_call = new getFormType_call(className, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class getFormType_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String className;
+      public getFormType_call(String className, org.apache.thrift.async.AsyncMethodCallback<getFormType_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.className = className;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getFormType", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        getFormType_args args = new getFormType_args();
+        args.setClassName(className);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public String getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        return (new Client(prot)).recv_getFormType();
+      }
+    }
+
+    public void completion(String className, String buf, int cursor, org.apache.thrift.async.AsyncMethodCallback<completion_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      completion_call method_call = new completion_call(className, buf, cursor, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class completion_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String className;
+      private String buf;
+      private int cursor;
+      public completion_call(String className, String buf, int cursor, org.apache.thrift.async.AsyncMethodCallback<completion_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.className = className;
+        this.buf = buf;
+        this.cursor = cursor;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("completion", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        completion_args args = new completion_args();
+        args.setClassName(className);
+        args.setBuf(buf);
+        args.setCursor(cursor);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public List<String> getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        return (new Client(prot)).recv_completion();
+      }
+    }
+
+    public void shutdown(org.apache.thrift.async.AsyncMethodCallback<shutdown_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      shutdown_call method_call = new shutdown_call(resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class shutdown_call extends org.apache.thrift.async.TAsyncMethodCall {
+      public shutdown_call(org.apache.thrift.async.AsyncMethodCallback<shutdown_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("shutdown", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        shutdown_args args = new shutdown_args();
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_shutdown();
+      }
+    }
+
+    public void getStatus(String jobId, org.apache.thrift.async.AsyncMethodCallback<getStatus_call> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      getStatus_call method_call = new getStatus_call(jobId, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class getStatus_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String jobId;
+      public getStatus_call(String jobId, org.apache.thrift.async.AsyncMethodCallback<getStatus_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.jobId = jobId;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getStatus", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        getStatus_args args = new getStatus_args();
+        args.setJobId(jobId);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public String getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        return (new Client(prot)).recv_getStatus();
+      }
+    }
+
+  }
+
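+  // Server-side dispatch: Processor maps each Thrift method name to a
+  // ProcessFunction that deserializes the matching *_args struct, invokes the
+  // user-supplied Iface implementation, and writes the *_result back. None of
+  // the functions are oneway (isOneway() returns false), so every call,
+  // including void ones such as shutdown(), produces a response frame.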
+  public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
+    private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
+    public Processor(I iface) {
+      super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
+    }
+
+    protected Processor(I iface, Map<String,  org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> processMap) {
+      super(iface, getProcessMap(processMap));
+    }
+
+    private static <I extends Iface> Map<String,  org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> getProcessMap(Map<String,  org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> processMap) {
+      processMap.put("createInterpreter", new createInterpreter());
+      processMap.put("open", new open());
+      processMap.put("close", new close());
+      processMap.put("interpret", new interpret());
+      processMap.put("cancel", new cancel());
+      processMap.put("getProgress", new getProgress());
+      processMap.put("getFormType", new getFormType());
+      processMap.put("completion", new completion());
+      processMap.put("shutdown", new shutdown());
+      processMap.put("getStatus", new getStatus());
+      return processMap;
+    }
+
+    public static class createInterpreter<I extends Iface> extends org.apache.thrift.ProcessFunction<I, createInterpreter_args> {
+      public createInterpreter() {
+        super("createInterpreter");
+      }
+
+      public createInterpreter_args getEmptyArgsInstance() {
+        return new createInterpreter_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public createInterpreter_result getResult(I iface, createInterpreter_args args) throws org.apache.thrift.TException {
+        createInterpreter_result result = new createInterpreter_result();
+        iface.createInterpreter(args.className, args.properties);
+        return result;
+      }
+    }
+
+    public static class open<I extends Iface> extends org.apache.thrift.ProcessFunction<I, open_args> {
+      public open() {
+        super("open");
+      }
+
+      public open_args getEmptyArgsInstance() {
+        return new open_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public open_result getResult(I iface, open_args args) throws org.apache.thrift.TException {
+        open_result result = new open_result();
+        iface.open(args.className);
+        return result;
+      }
+    }
+
+    public static class close<I extends Iface> extends org.apache.thrift.ProcessFunction<I, close_args> {
+      public close() {
+        super("close");
+      }
+
+      public close_args getEmptyArgsInstance() {
+        return new close_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public close_result getResult(I iface, close_args args) throws org.apache.thrift.TException {
+        close_result result = new close_result();
+        iface.close(args.className);
+        return result;
+      }
+    }
+
+    public static class interpret<I extends Iface> extends org.apache.thrift.ProcessFunction<I, interpret_args> {
+      public interpret() {
+        super("interpret");
+      }
+
+      public interpret_args getEmptyArgsInstance() {
+        return new interpret_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public interpret_result getResult(I iface, interpret_args args) throws org.apache.thrift.TException {
+        interpret_result result = new interpret_result();
+        result.success = iface.interpret(args.className, args.st, args.interpreterContext);
+        return result;
+      }
+    }
+
+    public static class cancel<I extends Iface> extends org.apache.thrift.ProcessFunction<I, cancel_args> {
+      public cancel() {
+        super("cancel");
+      }
+
+      public cancel_args getEmptyArgsInstance() {
+        return new cancel_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public cancel_result getResult(I iface, cancel_args args) throws org.apache.thrift.TException {
+        cancel_result result = new cancel_result();
+        iface.cancel(args.className, args.interpreterContext);
+        return result;
+      }
+    }
+
+    public static class getProgress<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getProgress_args> {
+      public getProgress() {
+        super("getProgress");
+      }
+
+      public getProgress_args getEmptyArgsInstance() {
+        return new getProgress_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public getProgress_result getResult(I iface, getProgress_args args) throws org.apache.thrift.TException {
+        getProgress_result result = new getProgress_result();
+        result.success = iface.getProgress(args.className, args.interpreterContext);
+        result.setSuccessIsSet(true);
+        return result;
+      }
+    }
+
+    public static class getFormType<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getFormType_args> {
+      public getFormType() {
+        super("getFormType");
+      }
+
+      public getFormType_args getEmptyArgsInstance() {
+        return new getFormType_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public getFormType_result getResult(I iface, getFormType_args args) throws org.apache.thrift.TException {
+        getFormType_result result = new getFormType_result();
+        result.success = iface.getFormType(args.className);
+        return result;
+      }
+    }
+
+    public static class completion<I extends Iface> extends org.apache.thrift.ProcessFunction<I, completion_args> {
+      public completion() {
+        super("completion");
+      }
+
+      public completion_args getEmptyArgsInstance() {
+        return new completion_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public completion_result getResult(I iface, completion_args args) throws org.apache.thrift.TException {
+        completion_result result = new completion_result();
+        result.success = iface.completion(args.className, args.buf, args.cursor);
+        return result;
+      }
+    }
+
+    public static class shutdown<I extends Iface> extends org.apache.thrift.ProcessFunction<I, shutdown_args> {
+      public shutdown() {
+        super("shutdown");
+      }
+
+      public shutdown_args getEmptyArgsInstance() {
+        return new shutdown_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public shutdown_result getResult(I iface, shutdown_args args) throws org.apache.thrift.TException {
+        shutdown_result result = new shutdown_result();
+        iface.shutdown();
+        return result;
+      }
+    }
+
+    public static class getStatus<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getStatus_args> {
+      public getStatus() {
+        super("getStatus");
+      }
+
+      public getStatus_args getEmptyArgsInstance() {
+        return new getStatus_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public getStatus_result getResult(I iface, getStatus_args args) throws org.apache.thrift.TException {
+        getStatus_result result = new getStatus_result();
+        result.success = iface.getStatus(args.jobId);
+        return result;
+      }
+    }
+
+  }
+
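+  // Generated argument and result structs: each implements TBase with a _Fields
+  // enum, a field metadata map, and two pluggable serialization schemes. The
+  // StandardScheme reads and writes tagged fields for the regular protocols,
+  // while the TupleScheme emits a BitSet of the fields that are present followed
+  // by their values for TTupleProtocol. createInterpreter_args carries the
+  // interpreter class name and its configuration properties.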
+  public static class createInterpreter_args implements org.apache.thrift.TBase<createInterpreter_args, createInterpreter_args._Fields>, java.io.Serializable, Cloneable   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("createInterpreter_args");
+
+    private static final org.apache.thrift.protocol.TField CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("className", org.apache.thrift.protocol.TType.STRING, (short)1);
+    private static final org.apache.thrift.protocol.TField PROPERTIES_FIELD_DESC = new org.apache.thrift.protocol.TField("properties", org.apache.thrift.protocol.TType.MAP, (short)2);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new createInterpreter_argsStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new createInterpreter_argsTupleSchemeFactory());
+    }
+
+    public String className; // required
+    public Map<String,String> properties; // required
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      CLASS_NAME((short)1, "className"),
+      PROPERTIES((short)2, "properties");
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if it's not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          case 1: // CLASS_NAME
+            return CLASS_NAME;
+          case 2: // PROPERTIES
+            return PROPERTIES;
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if it's not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+
+    // isset id assignments
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      tmpMap.put(_Fields.CLASS_NAME, new org.apache.thrift.meta_data.FieldMetaData("className", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+      tmpMap.put(_Fields.PROPERTIES, new org.apache.thrift.meta_data.FieldMetaData("properties", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
+              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), 
+              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(createInterpreter_args.class, metaDataMap);
+    }
+
+    public createInterpreter_args() {
+    }
+
+    public createInterpreter_args(
+      String className,
+      Map<String,String> properties)
+    {
+      this();
+      this.className = className;
+      this.properties = properties;
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public createInterpreter_args(createInterpreter_args other) {
+      if (other.isSetClassName()) {
+        this.className = other.className;
+      }
+      if (other.isSetProperties()) {
+        Map<String,String> __this__properties = new HashMap<String,String>();
+        for (Map.Entry<String, String> other_element : other.properties.entrySet()) {
+
+          String other_element_key = other_element.getKey();
+          String other_element_value = other_element.getValue();
+
+          String __this__properties_copy_key = other_element_key;
+
+          String __this__properties_copy_value = other_element_value;
+
+          __this__properties.put(__this__properties_copy_key, __this__properties_copy_value);
+        }
+        this.properties = __this__properties;
+      }
+    }
+
+    public createInterpreter_args deepCopy() {
+      return new createInterpreter_args(this);
+    }
+
+    @Override
+    public void clear() {
+      this.className = null;
+      this.properties = null;
+    }
+
+    public String getClassName() {
+      return this.className;
+    }
+
+    public createInterpreter_args setClassName(String className) {
+      this.className = className;
+      return this;
+    }
+
+    public void unsetClassName() {
+      this.className = null;
+    }
+
+    /** Returns true if field className is set (has been assigned a value) and false otherwise */
+    public boolean isSetClassName() {
+      return this.className != null;
+    }
+
+    public void setClassNameIsSet(boolean value) {
+      if (!value) {
+        this.className = null;
+      }
+    }
+
+    public int getPropertiesSize() {
+      return (this.properties == null) ? 0 : this.properties.size();
+    }
+
+    public void putToProperties(String key, String val) {
+      if (this.properties == null) {
+        this.properties = new HashMap<String,String>();
+      }
+      this.properties.put(key, val);
+    }
+
+    public Map<String,String> getProperties() {
+      return this.properties;
+    }
+
+    public createInterpreter_args setProperties(Map<String,String> properties) {
+      this.properties = properties;
+      return this;
+    }
+
+    public void unsetProperties() {
+      this.properties = null;
+    }
+
+    /** Returns true if field properties is set (has been assigned a value) and false otherwise */
+    public boolean isSetProperties() {
+      return this.properties != null;
+    }
+
+    public void setPropertiesIsSet(boolean value) {
+      if (!value) {
+        this.properties = null;
+      }
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      case CLASS_NAME:
+        if (value == null) {
+          unsetClassName();
+        } else {
+          setClassName((String)value);
+        }
+        break;
+
+      case PROPERTIES:
+        if (value == null) {
+          unsetProperties();
+        } else {
+          setProperties((Map<String,String>)value);
+        }
+        break;
+
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      case CLASS_NAME:
+        return getClassName();
+
+      case PROPERTIES:
+        return getProperties();
+
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      case CLASS_NAME:
+        return isSetClassName();
+      case PROPERTIES:
+        return isSetProperties();
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof createInterpreter_args)
+        return this.equals((createInterpreter_args)that);
+      return false;
+    }
+
+    public boolean equals(createInterpreter_args that) {
+      if (that == null)
+        return false;
+
+      boolean this_present_className = true && this.isSetClassName();
+      boolean that_present_className = true && that.isSetClassName();
+      if (this_present_className || that_present_className) {
+        if (!(this_present_className && that_present_className))
+          return false;
+        if (!this.className.equals(that.className))
+          return false;
+      }
+
+      boolean this_present_properties = true && this.isSetProperties();
+      boolean that_present_properties = true && that.isSetProperties();
+      if (this_present_properties || that_present_properties) {
+        if (!(this_present_properties && that_present_properties))
+          return false;
+        if (!this.properties.equals(that.properties))
+          return false;
+      }
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return 0;
+    }
+
+    public int compareTo(createInterpreter_args other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+      createInterpreter_args typedOther = (createInterpreter_args)other;
+
+      lastComparison = Boolean.valueOf(isSetClassName()).compareTo(typedOther.isSetClassName());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetClassName()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.className, typedOther.className);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      lastComparison = Boolean.valueOf(isSetProperties()).compareTo(typedOther.isSetProperties());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetProperties()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.properties, typedOther.properties);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("createInterpreter_args(");
+      boolean first = true;
+
+      sb.append("className:");
+      if (this.className == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.className);
+      }
+      first = false;
+      if (!first) sb.append(", ");
+      sb.append("properties:");
+      if (this.properties == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.properties);
+      }
+      first = false;
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class createInterpreter_argsStandardSchemeFactory implements SchemeFactory {
+      public createInterpreter_argsStandardScheme getScheme() {
+        return new createInterpreter_argsStandardScheme();
+      }
+    }
+
+    private static class createInterpreter_argsStandardScheme extends StandardScheme<createInterpreter_args> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, createInterpreter_args struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            case 1: // CLASS_NAME
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                struct.className = iprot.readString();
+                struct.setClassNameIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            case 2: // PROPERTIES
+              if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+                {
+                  org.apache.thrift.protocol.TMap _map0 = iprot.readMapBegin();
+                  struct.properties = new HashMap<String,String>(2*_map0.size);
+                  for (int _i1 = 0; _i1 < _map0.size; ++_i1)
+                  {
+                    String _key2; // required
+                    String _val3; // required
+                    _key2 = iprot.readString();
+                    _val3 = iprot.readString();
+                    struct.properties.put(_key2, _val3);
+                  }
+                  iprot.readMapEnd();
+                }
+                struct.setPropertiesIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+
+        // check for required fields of primitive type, which can't be checked in the validate method
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, createInterpreter_args struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        if (struct.className != null) {
+          oprot.writeFieldBegin(CLASS_NAME_FIELD_DESC);
+          oprot.writeString(struct.className);
+          oprot.writeFieldEnd();
+        }
+        if (struct.properties != null) {
+          oprot.writeFieldBegin(PROPERTIES_FIELD_DESC);
+          {
+            oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.properties.size()));
+            for (Map.Entry<String, String> _iter4 : struct.properties.entrySet())
+            {
+              oprot.writeString(_iter4.getKey());
+              oprot.writeString(_iter4.getValue());
+            }
+            oprot.writeMapEnd();
+          }
+          oprot.writeFieldEnd();
+        }
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class createInterpreter_argsTupleSchemeFactory implements SchemeFactory {
+      public createInterpreter_argsTupleScheme getScheme() {
+        return new createInterpreter_argsTupleScheme();
+      }
+    }
+
+    private static class createInterpreter_argsTupleScheme extends TupleScheme<createInterpreter_args> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, createInterpreter_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+        BitSet optionals = new BitSet();
+        if (struct.isSetClassName()) {
+          optionals.set(0);
+        }
+        if (struct.isSetProperties()) {
+          optionals.set(1);
+        }
+        oprot.writeBitSet(optionals, 2);
+        if (struct.isSetClassName()) {
+          oprot.writeString(struct.className);
+        }
+        if (struct.isSetProperties()) {
+          {
+            oprot.writeI32(struct.properties.size());
+            for (Map.Entry<String, String> _iter5 : struct.properties.entrySet())
+            {
+              oprot.writeString(_iter5.getKey());
+              oprot.writeString(_iter5.getValue());
+            }
+          }
+        }
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, createInterpreter_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+        BitSet incoming = iprot.readBitSet(2);
+        if (incoming.get(0)) {
+          struct.className = iprot.readString();
+          struct.setClassNameIsSet(true);
+        }
+        if (incoming.get(1)) {
+          {
+            org.apache.thrift.protocol.TMap _map6 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+            struct.properties = new HashMap<String,String>(2*_map6.size);
+            for (int _i7 = 0; _i7 < _map6.size; ++_i7)
+            {
+              String _key8; // required
+              String _val9; // required
+              _key8 = iprot.readString();
+              _val9 = iprot.readString();
+              struct.properties.put(_key8, _val9);
+            }
+          }
+          struct.setPropertiesIsSet(true);
+        }
+      }
+    }
+
+  }
+
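+  // createInterpreter() returns void, so createInterpreter_result has no fields;
+  // an empty result struct is still serialized so recv_createInterpreter() can
+  // observe that the call completed.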
+  public static class createInterpreter_result implements org.apache.thrift.TBase<createInterpreter_result, createInterpreter_result._Fields>, java.io.Serializable, Cloneable   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("createInterpreter_result");
+
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new createInterpreter_resultStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new createInterpreter_resultTupleSchemeFactory());
+    }
+
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+;
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if it's not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if it's not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(createInterpreter_result.class, metaDataMap);
+    }
+
+    public createInterpreter_result() {
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public createInterpreter_result(createInterpreter_result other) {
+    }
+
+    public createInterpreter_result deepCopy() {
+      return new createInterpreter_result(this);
+    }
+
+    @Override
+    public void clear() {
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof createInterpreter_result)
+        return this.equals((createInterpreter_result)that);
+      return false;
+    }
+
+    public boolean equals(createInterpreter_result that) {
+      if (that == null)
+        return false;
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return 0;
+    }
+
+    public int compareTo(createInterpreter_result other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+      createInterpreter_result typedOther = (createInterpreter_result)other;
+
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("createInterpreter_result(");
+      boolean first = true;
+
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class createInterpreter_resultStandardSchemeFactory implements SchemeFactory {
+      public createInterpreter_resultStandardScheme getScheme() {
+        return new createInterpreter_resultStandardScheme();
+      }
+    }
+
+    private static class createInterpreter_resultStandardScheme extends StandardScheme<createInterpreter_result> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, createInterpreter_result struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+
+        // check for required fields of primitive type, which can't be checked in the validate method
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, createInterpreter_result struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class createInterpreter_resultTupleSchemeFactory implements SchemeFactory {
+      public createInterpreter_resultTupleScheme getScheme() {
+        return new createInterpreter_resultTupleScheme();
+      }
+    }
+
+    private static class createInterpreter_resultTupleScheme extends TupleScheme<createInterpreter_result> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, createInterpreter_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, createInterpreter_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+      }
+    }
+
+  }
+
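+  // The remaining *_args and *_result structs (open, close, interpret, cancel,
+  // getProgress, getFormType, completion, shutdown, getStatus) follow the same
+  // generated pattern and differ only in the fields they carry.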
+  public static class open_args implements org.apache.thrift.TBase<open_args, open_args._Fields>, java.io.Serializable, Cloneable   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("open_args");
+
+    private static final org.apache.thrift.protocol.TField CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("className", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new open_argsStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new open_argsTupleSchemeFactory());
+    }
+
+    public String className; // required
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      CLASS_NAME((short)1, "className");
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if its not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          case 1: // CLASS_NAME
+            return CLASS_NAME;
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if its not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+
+    // isset id assignments
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      tmpMap.put(_Fields.CLASS_NAME, new org.apache.thrift.meta_data.FieldMetaData("className", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(open_args.class, metaDataMap);
+    }
+
+    public open_args() {
+    }
+
+    public open_args(
+      String className)
+    {
+      this();
+      this.className = className;
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public open_args(open_args other) {
+      if (other.isSetClassName()) {
+        this.className = other.className;
+      }
+    }
+
+    public open_args deepCopy() {
+      return new open_args(this);
+    }
+
+    @Override
+    public void clear() {
+      this.className = null;
+    }
+
+    public String getClassName() {
+      return this.className;
+    }
+
+    public open_args setClassName(String className) {
+      this.className = className;
+      return this;
+    }
+
+    public void unsetClassName() {
+      this.className = null;
+    }
+
+    /** Returns true if field className is set (has been assigned a value) and false otherwise */
+    public boolean isSetClassName() {
+      return this.className != null;
+    }
+
+    public void setClassNameIsSet(boolean value) {
+      if (!value) {
+        this.className = null;
+      }
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      case CLASS_NAME:
+        if (value == null) {
+          unsetClassName();
+        } else {
+          setClassName((String)value);
+        }
+        break;
+
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      case CLASS_NAME:
+        return getClassName();
+
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      case CLASS_NAME:
+        return isSetClassName();
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof open_args)
+        return this.equals((open_args)that);
+      return false;
+    }
+
+    public boolean equals(open_args that) {
+      if (that == null)
+        return false;
+
+      boolean this_present_className = true && this.isSetClassName();
+      boolean that_present_className = true && that.isSetClassName();
+      if (this_present_className || that_present_className) {
+        if (!(this_present_className && that_present_className))
+          return false;
+        if (!this.className.equals(that.className))
+          return false;
+      }
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return 0;
+    }
+
+    public int compareTo(open_args other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+      open_args typedOther = (open_args)other;
+
+      lastComparison = Boolean.valueOf(isSetClassName()).compareTo(typedOther.isSetClassName());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetClassName()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.className, typedOther.className);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("open_args(");
+      boolean first = true;
+
+      sb.append("className:");
+      if (this.className == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.className);
+      }
+      first = false;
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class open_argsStandardSchemeFactory implements SchemeFactory {
+      public open_argsStandardScheme getScheme() {
+        return new open_argsStandardScheme();
+      }
+    }
+
+    private static class open_argsStandardScheme extends StandardScheme<open_args> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, open_args struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            case 1: // CLASS_NAME
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                struct.className = iprot.readString();
+                struct.setClassNameIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+
+        // check for required fields of primitive type, which can't be checked in the validate method
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, open_args struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        if (struct.className != null) {
+          oprot.writeFieldBegin(CLASS_NAME_FIELD_DESC);
+          oprot.writeString(struct.className);
+          oprot.writeFieldEnd();
+        }
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class open_argsTupleSchemeFactory implements SchemeFactory {
+      public open_argsTupleScheme getScheme() {
+        return new open_argsTupleScheme();
+      }
+    }
+
+    private static class open_argsTupleScheme extends TupleScheme<open_args> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, open_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+        BitSet optionals = new BitSet();
+        if (struct.isSetClassName()) {
+          optionals.set(0);
+        }
+        oprot.writeBitSet(optionals, 1);
+        if (struct.isSetClassName()) {
+          oprot.writeString(struct.className);
+        }
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, open_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+        BitSet incoming = iprot.readBitSet(1);
+        if (incoming.get(0)) {
+          struct.className = iprot.readString();
+          struct.setClassNameIsSet(true);
+        }
+      }
+    }
+
+  }
+
+  public static class open_result implements org.apache.thrift.TBase<open_result, open_result._Fields>, java.io.Serializable, Cloneable   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("open_result");
+
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new open_resultStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new open_resultTupleSchemeFactory());
+    }
+
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+;
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if its not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if its not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(open_result.class, metaDataMap);
+    }
+
+    public open_result() {
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public open_result(open_result other) {
+    }
+
+    public open_result deepCopy() {
+      return new open_result(this);
+    }
+
+    @Override
+    public void clear() {
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof open_result)
+        return this.equals((open_result)that);
+      return false;
+    }
+
+    public boolean equals(open_result that) {
+      if (that == null)
+        return false;
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return 0;
+    }
+
+    public int compareTo(open_result other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+      open_result typedOther = (open_result)other;
+
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+      }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("open_result(");
+      boolean first = true;
+
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class open_resultStandardSchemeFactory implements SchemeFactory {
+      public open_resultStandardScheme getScheme() {
+        return new open_resultStandardScheme();
+      }
+    }
+
+    private static class open_resultStandardScheme extends StandardScheme<open_result> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, open_result struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+
+        // check for required fields of primitive type, which can't be checked in the validate method
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, open_result struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class open_resultTupleSchemeFactory implements SchemeFactory {
+      public open_resultTupleScheme getScheme() {
+        return new open_resultTupleScheme();
+      }
+    }
+
+    private static class open_resultTupleScheme extends TupleScheme<open_result> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, open_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, open_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+      }
+    }
+
+  }
+
+  public static class close_args implements org.apache.thrift.TBase<close_args, close_args._Fields>, java.io.Serializable, Cloneable   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("close_args");
+
+    private static final org.apache.thrift.protocol.TField CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("className", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new close_argsStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new close_argsTupleSchemeFactory());
+    }
+
+    public String className; // required


<TRUNCATED>

[07/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/FIFOScheduler.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/FIFOScheduler.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/FIFOScheduler.java
new file mode 100644
index 0000000..e7f950a
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/FIFOScheduler.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+
+import org.apache.zeppelin.scheduler.Job.Status;
+
+/**
+ * Scheduler that runs submitted jobs one at a time, in submission (FIFO) order.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class FIFOScheduler implements Scheduler {
+  List<Job> queue = new LinkedList<Job>();
+  private ExecutorService executor;
+  private SchedulerListener listener;
+  boolean terminate = false;
+  Job runningJob = null;
+  private String name;
+
+  public FIFOScheduler(String name, ExecutorService executor, SchedulerListener listener) {
+    this.name = name;
+    this.executor = executor;
+    this.listener = listener;
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public Collection<Job> getJobsWaiting() {
+    List<Job> ret = new LinkedList<Job>();
+    synchronized (queue) {
+      for (Job job : queue) {
+        ret.add(job);
+      }
+    }
+    return ret;
+  }
+
+  @Override
+  public Collection<Job> getJobsRunning() {
+    List<Job> ret = new LinkedList<Job>();
+    Job job = runningJob;
+
+    if (job != null) {
+      ret.add(job);
+    }
+
+    return ret;
+  }
+
+
+
+  @Override
+  public void submit(Job job) {
+    job.setStatus(Status.PENDING);
+    synchronized (queue) {
+      queue.add(job);
+      queue.notify();
+    }
+  }
+
+  @Override
+  public void run() {
+
+    synchronized (queue) {
+      while (terminate == false) {
+        if (runningJob != null || queue.isEmpty() == true) {
+          try {
+            queue.wait(500);
+          } catch (InterruptedException e) {
+          }
+          continue;
+        }
+
+        runningJob = queue.remove(0);
+
+        final Scheduler scheduler = this;
+        this.executor.execute(new Runnable() {
+          @Override
+          public void run() {
+            if (runningJob.isAborted()) {
+              runningJob.setStatus(Status.ABORT);
+              runningJob.aborted = false;
+              synchronized (queue) {
+                queue.notify();
+              }
+              return;
+            }
+
+            runningJob.setStatus(Status.RUNNING);
+            if (listener != null) {
+              listener.jobStarted(scheduler, runningJob);
+            }
+            runningJob.run();
+            if (runningJob.isAborted()) {
+              runningJob.setStatus(Status.ABORT);
+            } else {
+              if (runningJob.getException() != null) {
+                runningJob.setStatus(Status.ERROR);
+              } else {
+                runningJob.setStatus(Status.FINISHED);
+              }
+            }
+            if (listener != null) {
+              listener.jobFinished(scheduler, runningJob);
+            }
+            // reset aborted flag to allow retry
+            runningJob.aborted = false;
+            runningJob = null;
+            synchronized (queue) {
+              queue.notify();
+            }
+          }
+        });
+      }
+    }
+  }
+
+  @Override
+  public void stop() {
+    terminate = true;
+    synchronized (queue) {
+      queue.notify();
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Job.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Job.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Job.java
new file mode 100644
index 0000000..9837ad2
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Job.java
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Map;
+
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * Skeletal implementation of the Job concept.
+ *  - designed for inheritance
+ *  - should be run on a separate thread
+ *  - maintains internal state: its status
+ *  - supports listeners who are updated on status change
+ *
+ *  The Job class is serialized/deserialized for server<->client communication
+ *  and for saving/loading jobs from disk.
+ *  Take this into account when changing, adding, or deleting non-transient field names.
+ *
+ *  @author Leemoonsoo
+ */
+public abstract class Job {
+  /**
+   * Job status.
+   *
+   * READY - Job is not running, ready to run.
+   * PENDING - Job is submitted to the scheduler but not running yet.
+   * RUNNING - Job is running.
+   * FINISHED - Job finished running successfully.
+   * ERROR - Job finished running with an error.
+   * ABORT - Job was aborted.
+   *
+   */
+  public static enum Status {
+    READY,
+    PENDING,
+    RUNNING,
+    FINISHED,
+    ERROR,
+    ABORT;
+    boolean isReady() {
+      return this == READY;
+    }
+
+    boolean isRunning() {
+      return this == RUNNING;
+    }
+
+    boolean isPending() {
+      return this == PENDING;
+    }
+  }
+
+  private String jobName;
+  String id;
+  Object result;
+  Date dateCreated;
+  Date dateStarted;
+  Date dateFinished;
+  Status status;
+
+  transient boolean aborted = false;
+
+  String errorMessage;
+  private transient Throwable exception;
+  private transient JobListener listener;
+  private long progressUpdateIntervalMs;
+
+  public Job(String jobName, JobListener listener, long progressUpdateIntervalMs) {
+    this.jobName = jobName;
+    this.listener = listener;
+    this.progressUpdateIntervalMs = progressUpdateIntervalMs;
+
+    dateCreated = new Date();
+    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd-HHmmss");
+    id = dateFormat.format(dateCreated) + "_" + super.hashCode();
+
+    setStatus(Status.READY);
+  }
+
+  public Job(String jobName, JobListener listener) {
+    this(jobName, listener, JobProgressPoller.DEFAULT_INTERVAL_MSEC);
+  }
+
+  public Job(String jobId, String jobName, JobListener listener, long progressUpdateIntervalMs) {
+    this.jobName = jobName;
+    this.listener = listener;
+    this.progressUpdateIntervalMs = progressUpdateIntervalMs;
+
+    id = jobId;
+
+    setStatus(Status.READY);
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public int hashCode() {
+    return id.hashCode();
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    return ((Job) o).hashCode() == hashCode();
+  }
+
+  public Status getStatus() {
+    return status;
+  }
+
+  public void setStatus(Status status) {
+    if (this.status == status) {
+      return;
+    }
+    Status before = this.status;
+    Status after = status;
+    if (listener != null) {
+      listener.beforeStatusChange(this, before, after);
+    }
+    this.status = status;
+    if (listener != null) {
+      listener.afterStatusChange(this, before, after);
+    }
+  }
+
+  public void setListener(JobListener listener) {
+    this.listener = listener;
+  }
+
+  public JobListener getListener() {
+    return listener;
+  }
+
+  public boolean isTerminated() {
+    return !this.status.isReady() && !this.status.isRunning() && !this.status.isPending();
+  }
+
+  public boolean isRunning() {
+    return this.status.isRunning();
+  }
+
+  public void run() {
+    JobProgressPoller progressUpdator = null;
+    try {
+      progressUpdator = new JobProgressPoller(this, progressUpdateIntervalMs);
+      progressUpdator.start();
+      dateStarted = new Date();
+      result = jobRun();
+      this.exception = null;
+      errorMessage = null;
+      dateFinished = new Date();
+      progressUpdator.terminate();
+    } catch (NullPointerException e) {
+      logger().error("Job failed", e);
+      progressUpdator.terminate();
+      this.exception = e;
+      result = e.getMessage();
+      errorMessage = getStack(e);
+      dateFinished = new Date();
+    } catch (Throwable e) {
+      logger().error("Job failed", e);
+      progressUpdator.terminate();
+      this.exception = e;
+      result = e.getMessage();
+      errorMessage = getStack(e);
+      dateFinished = new Date();
+    } finally {
+      //aborted = false;
+    }
+  }
+
+  public String getStack(Throwable e) {
+    StackTraceElement[] stacks = e.getStackTrace();
+    if (stacks == null) {
+      return "";
+    }
+    String ss = "";
+    for (StackTraceElement s : stacks) {
+      ss += s.toString() + "\n";
+    }
+
+    return ss;
+  }
+
+  public Throwable getException() {
+    return exception;
+  }
+
+  protected void setException(Throwable t) {
+    exception = t;
+    errorMessage = getStack(t);
+  }
+
+  public Object getReturn() {
+    return result;
+  }
+
+  public String getJobName() {
+    return jobName;
+  }
+
+  public void setJobName(String jobName) {
+    this.jobName = jobName;
+  }
+
+  public abstract int progress();
+
+  public abstract Map<String, Object> info();
+
+  protected abstract Object jobRun() throws Throwable;
+
+  protected abstract boolean jobAbort();
+
+  public void abort() {
+    aborted = jobAbort();
+  }
+
+  public boolean isAborted() {
+    return aborted;
+  }
+
+  public Date getDateCreated() {
+    return dateCreated;
+  }
+
+  public Date getDateStarted() {
+    return dateStarted;
+  }
+
+  public Date getDateFinished() {
+    return dateFinished;
+  }
+
+  private Logger logger() {
+    return LoggerFactory.getLogger(Job.class);
+  }
+
+  protected void setResult(Object result) {
+    this.result = result;
+  }
+}
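
For illustration only (not part of this commit): the Job javadoc above describes a skeletal class that is meant to be subclassed and run on a separate thread by a scheduler. Under the API shown in this diff, a minimal subclass might look roughly like the sketch below; the package org.apache.zeppelin.example and the class name SleepJob are made up for the example.

package org.apache.zeppelin.example;

import java.util.HashMap;
import java.util.Map;

import org.apache.zeppelin.scheduler.Job;
import org.apache.zeppelin.scheduler.JobListener;

public class SleepJob extends Job {
  public SleepJob(String name, JobListener listener) {
    super(name, listener);
  }

  @Override
  protected Object jobRun() throws Throwable {
    Thread.sleep(1000);   // pretend to do some work
    return "done";        // becomes the value returned by getReturn()
  }

  @Override
  protected boolean jobAbort() {
    return true;          // nothing to interrupt; report the abort as successful
  }

  @Override
  public int progress() {
    return 0;             // no meaningful progress to report
  }

  @Override
  public Map<String, Object> info() {
    return new HashMap<String, Object>();
  }
}

The scheduler that eventually runs the job drives the status transitions (PENDING, RUNNING, then FINISHED, ERROR or ABORT) around this jobRun() call.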

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobListener.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobListener.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobListener.java
new file mode 100644
index 0000000..1ed551f
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobListener.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+/**
+ * Listener that is notified of a job's progress updates and status changes.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public interface JobListener {
+  public void onProgressUpdate(Job job, int progress);
+
+  public void beforeStatusChange(Job job, Job.Status before, Job.Status after);
+
+  public void afterStatusChange(Job job, Job.Status before, Job.Status after);
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobProgressPoller.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobProgressPoller.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobProgressPoller.java
new file mode 100644
index 0000000..9de1325
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/JobProgressPoller.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Thread that periodically polls a running job's progress and reports it to the job's listener.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class JobProgressPoller extends Thread {
+  public static final long DEFAULT_INTERVAL_MSEC = 500;
+  Logger logger = LoggerFactory.getLogger(JobProgressPoller.class);
+  private Job job;
+  private long intervalMs;
+  boolean terminate = false;
+
+  public JobProgressPoller(Job job, long intervalMs) {
+    this.job = job;
+    this.intervalMs = intervalMs;
+  }
+
+  @Override
+  public void run() {
+    if (intervalMs < 0) {
+      return;
+    } else if (intervalMs == 0) {
+      intervalMs = DEFAULT_INTERVAL_MSEC;
+    }
+
+    while (terminate == false) {
+      JobListener listener = job.getListener();
+      if (listener != null) {
+        try {
+          if (job.isRunning()) {
+            listener.onProgressUpdate(job, job.progress());
+          }
+        } catch (Exception e) {
+          logger.error("Can not get or update progress", e);
+        }
+      }
+      try {
+        Thread.sleep(intervalMs);
+      } catch (InterruptedException e) {
+      }
+    }
+  }
+
+  public void terminate() {
+    terminate = true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/ParallelScheduler.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/ParallelScheduler.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/ParallelScheduler.java
new file mode 100644
index 0000000..c8e8e04
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/ParallelScheduler.java
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+
+import org.apache.zeppelin.scheduler.Job.Status;
+
+/**
+ * Scheduler that runs submitted jobs concurrently, up to a configurable maximum concurrency.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class ParallelScheduler implements Scheduler {
+  List<Job> queue = new LinkedList<Job>();
+  List<Job> running = new LinkedList<Job>();
+  private ExecutorService executor;
+  private SchedulerListener listener;
+  boolean terminate = false;
+  private String name;
+  private int maxConcurrency;
+
+  public ParallelScheduler(String name, ExecutorService executor, SchedulerListener listener,
+      int maxConcurrency) {
+    this.name = name;
+    this.executor = executor;
+    this.listener = listener;
+    this.maxConcurrency = maxConcurrency;
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public Collection<Job> getJobsWaiting() {
+    List<Job> ret = new LinkedList<Job>();
+    synchronized (queue) {
+      for (Job job : queue) {
+        ret.add(job);
+      }
+    }
+    return ret;
+  }
+
+  @Override
+  public Collection<Job> getJobsRunning() {
+    List<Job> ret = new LinkedList<Job>();
+    synchronized (queue) {
+      for (Job job : running) {
+        ret.add(job);
+      }
+    }
+    return ret;
+  }
+
+
+
+  @Override
+  public void submit(Job job) {
+    job.setStatus(Status.PENDING);
+    synchronized (queue) {
+      queue.add(job);
+      queue.notify();
+    }
+  }
+
+  @Override
+  public void run() {
+
+    synchronized (queue) {
+      while (terminate == false) {
+        if (running.size() >= maxConcurrency || queue.isEmpty() == true) {
+          try {
+            queue.wait(500);
+          } catch (InterruptedException e) {
+          }
+          continue;
+        }
+
+        Job job = queue.remove(0);
+        running.add(job);
+        Scheduler scheduler = this;
+
+        executor.execute(new JobRunner(scheduler, job));
+      }
+
+
+    }
+  }
+
+  public void setMaxConcurrency(int maxConcurrency) {
+    this.maxConcurrency = maxConcurrency;
+    synchronized (queue) {
+      queue.notify();
+    }
+  }
+
+  private class JobRunner implements Runnable {
+    private Scheduler scheduler;
+    private Job job;
+
+    public JobRunner(Scheduler scheduler, Job job) {
+      this.scheduler = scheduler;
+      this.job = job;
+    }
+
+    @Override
+    public void run() {
+      if (job.isAborted()) {
+        job.setStatus(Status.ABORT);
+        job.aborted = false;
+
+        synchronized (queue) {
+          running.remove(job);
+          queue.notify();
+        }
+
+        return;
+      }
+
+      job.setStatus(Status.RUNNING);
+      if (listener != null) {
+        listener.jobStarted(scheduler, job);
+      }
+      job.run();
+      if (job.isAborted()) {
+        job.setStatus(Status.ABORT);
+      } else {
+        if (job.getException() != null) {
+          job.setStatus(Status.ERROR);
+        } else {
+          job.setStatus(Status.FINISHED);
+        }
+      }
+
+      if (listener != null) {
+        listener.jobFinished(scheduler, job);
+      }
+
+      // reset aborted flag to allow retry
+      job.aborted = false;
+      synchronized (queue) {
+        running.remove(job);
+        queue.notify();
+      }
+    }
+  }
+
+
+  @Override
+  public void stop() {
+    terminate = true;
+    synchronized (queue) {
+      queue.notify();
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/RemoteScheduler.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/RemoteScheduler.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/RemoteScheduler.java
new file mode 100644
index 0000000..15e4a3c
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/RemoteScheduler.java
@@ -0,0 +1,373 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+
+import org.apache.thrift.TException;
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
+import org.apache.zeppelin.scheduler.Job.Status;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Scheduler that submits jobs to a remote interpreter process and tracks their status by polling.
+ */
+public class RemoteScheduler implements Scheduler {
+  Logger logger = LoggerFactory.getLogger(RemoteScheduler.class);
+
+  List<Job> queue = new LinkedList<Job>();
+  List<Job> running = new LinkedList<Job>();
+  private ExecutorService executor;
+  private SchedulerListener listener;
+  boolean terminate = false;
+  private String name;
+  private int maxConcurrency;
+  private RemoteInterpreterProcess interpreterProcess;
+
+  public RemoteScheduler(String name, ExecutorService executor,
+      RemoteInterpreterProcess interpreterProcess, SchedulerListener listener,
+      int maxConcurrency) {
+    this.name = name;
+    this.executor = executor;
+    this.listener = listener;
+    this.interpreterProcess = interpreterProcess;
+    this.maxConcurrency = maxConcurrency;
+  }
+
+  @Override
+  public void run() {
+    while (terminate == false) {
+      Job job = null;
+
+      synchronized (queue) {
+        if (running.size() >= maxConcurrency || queue.isEmpty() == true) {
+          try {
+            queue.wait(500);
+          } catch (InterruptedException e) {
+          }
+          continue;
+        }
+
+        job = queue.remove(0);
+        running.add(job);
+      }
+
+      // run
+      Scheduler scheduler = this;
+      JobRunner jobRunner = new JobRunner(scheduler, job);
+      executor.execute(jobRunner);
+
+      // wait until it is submitted to the remote
+      while (!jobRunner.isJobSubmittedInRemote()) {
+        synchronized (queue) {
+          try {
+            queue.wait(500);
+          } catch (InterruptedException e) {
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public Collection<Job> getJobsWaiting() {
+    List<Job> ret = new LinkedList<Job>();
+    synchronized (queue) {
+      for (Job job : queue) {
+        ret.add(job);
+      }
+    }
+    return ret;
+  }
+
+  @Override
+  public Collection<Job> getJobsRunning() {
+    List<Job> ret = new LinkedList<Job>();
+    synchronized (queue) {
+      for (Job job : running) {
+        ret.add(job);
+      }
+    }
+    return ret;
+  }
+
+  @Override
+  public void submit(Job job) {
+    job.setStatus(Status.PENDING);
+
+    synchronized (queue) {
+      queue.add(job);
+      queue.notify();
+    }
+  }
+
+  public void setMaxConcurrency(int maxConcurrency) {
+    this.maxConcurrency = maxConcurrency;
+    synchronized (queue) {
+      queue.notify();
+    }
+  }
+
+  /**
+   * Polls status information from the remote process while the job
+   * transitions from PENDING to RUNNING.
+   */
+  private class JobStatusPoller extends Thread {
+    private long initialPeriodMsec;
+    private long initialPeriodCheckIntervalMsec;
+    private long checkIntervalMsec;
+    private boolean terminate;
+    private JobListener listener;
+    private Job job;
+    Status lastStatus;
+
+    public JobStatusPoller(long initialPeriodMsec,
+        long initialPeriodCheckIntervalMsec, long checkIntervalMsec, Job job,
+        JobListener listener) {
+      this.initialPeriodMsec = initialPeriodMsec;
+      this.initialPeriodCheckIntervalMsec = initialPeriodCheckIntervalMsec;
+      this.checkIntervalMsec = checkIntervalMsec;
+      this.job = job;
+      this.listener = listener;
+      this.terminate = false;
+    }
+
+    @Override
+    public void run() {
+      long started = System.currentTimeMillis();
+      while (terminate == false) {
+        long current = System.currentTimeMillis();
+        long interval;
+        if (current - started < initialPeriodMsec) {
+          interval = initialPeriodCheckIntervalMsec;
+        } else {
+          interval = checkIntervalMsec;
+        }
+
+        synchronized (this) {
+          try {
+            this.wait(interval);
+          } catch (InterruptedException e) {
+          }
+        }
+
+
+        Status newStatus = getStatus();
+        if (newStatus == null) { // unknown
+          continue;
+        }
+
+        if (newStatus != Status.READY && newStatus != Status.PENDING) {
+          // we don't need more
+          continue;
+        }
+      }
+    }
+
+    public void shutdown() {
+      terminate = true;
+      synchronized (this) {
+        this.notify();
+      }
+    }
+
+
+    private Status getLastStatus() {
+      if (terminate == true) {
+        if (lastStatus != Status.FINISHED &&
+            lastStatus != Status.ERROR &&
+            lastStatus != Status.ABORT) {
+          return Status.FINISHED;
+        } else {
+          return (lastStatus == null) ? Status.FINISHED : lastStatus;
+        }
+      } else {
+        return (lastStatus == null) ? Status.FINISHED : lastStatus;
+      }
+    }
+
+    public synchronized Job.Status getStatus() {
+      if (interpreterProcess.referenceCount() <= 0) {
+        return getLastStatus();
+      }
+
+      Client client;
+      try {
+        client = interpreterProcess.getClient();
+      } catch (Exception e) {
+        logger.error("Can't get status information", e);
+        lastStatus = Status.ERROR;
+        return Status.ERROR;
+      }
+
+      try {
+        String statusStr = client.getStatus(job.getId());
+        if ("Unknown".equals(statusStr)) {
+          // this job was not found in the remote scheduler:
+          // maybe not yet submitted, maybe already finished
+          Status status = getLastStatus();
+          listener.afterStatusChange(job, null, status);
+          return status;
+        }
+        Status status = Status.valueOf(statusStr);
+        lastStatus = status;
+        listener.afterStatusChange(job, null, status);
+        return status;
+      } catch (TException e) {
+        logger.error("Can't get status information", e);
+        lastStatus = Status.ERROR;
+        return Status.ERROR;
+      } catch (Exception e) {
+        logger.error("Unknown status", e);
+        lastStatus = Status.ERROR;
+        return Status.ERROR;
+      } finally {
+        interpreterProcess.releaseClient(client);
+      }
+    }
+  }
+
+  private class JobRunner implements Runnable, JobListener {
+    private Scheduler scheduler;
+    private Job job;
+    private boolean jobExecuted;
+    boolean jobSubmittedRemotely;
+
+    public JobRunner(Scheduler scheduler, Job job) {
+      this.scheduler = scheduler;
+      this.job = job;
+      jobExecuted = false;
+      jobSubmittedRemotely = false;
+    }
+
+    public boolean isJobSubmittedInRemote() {
+      return jobSubmittedRemotely;
+    }
+
+    @Override
+    public void run() {
+      if (job.isAborted()) {
+        job.setStatus(Status.ABORT);
+        job.aborted = false;
+
+        synchronized (queue) {
+          running.remove(job);
+          queue.notify();
+        }
+
+        return;
+      }
+
+      JobStatusPoller jobStatusPoller = new JobStatusPoller(1500, 100, 500,
+          job, this);
+      jobStatusPoller.start();
+
+      if (listener != null) {
+        listener.jobStarted(scheduler, job);
+      }
+      job.run();
+      jobExecuted = true;
+      jobSubmittedRemotely = true;
+
+      jobStatusPoller.shutdown();
+      try {
+        jobStatusPoller.join();
+      } catch (InterruptedException e) {
+        logger.error("JobStatusPoller interrupted", e);
+      }
+
+      job.setStatus(jobStatusPoller.getStatus());
+      if (listener != null) {
+        listener.jobFinished(scheduler, job);
+      }
+
+      // reset aborted flag to allow retry
+      job.aborted = false;
+
+      synchronized (queue) {
+        running.remove(job);
+        queue.notify();
+      }
+    }
+
+    @Override
+    public void onProgressUpdate(Job job, int progress) {
+    }
+
+    @Override
+    public void beforeStatusChange(Job job, Status before, Status after) {
+    }
+
+    @Override
+    public void afterStatusChange(Job job, Status before, Status after) {
+      if (after == null) { // unknown. maybe not yet submitted remotely, maybe already finished.
+        if (jobExecuted) {
+          jobSubmittedRemotely = true;
+          if (job.isAborted()) {
+            job.setStatus(Status.ABORT);
+          } else if (job.getException() != null) {
+            job.setStatus(Status.ERROR);
+          } else {
+            job.setStatus(Status.FINISHED);
+          }
+        }
+        return;
+      }
+
+
+      // Update remoteStatus
+      if (jobExecuted == false) {
+        if (after == Status.FINISHED || after == Status.ABORT
+            || after == Status.ERROR) {
+          // it can be status of last run.
+          // so not updating the remoteStatus
+          return;
+        } else if (after == Status.RUNNING) {
+          jobSubmittedRemotely = true;
+        }
+      } else {
+        jobSubmittedRemotely = true;
+      }
+
+      // status polled by status poller
+      if (job.getStatus() != after) {
+        job.setStatus(after);
+      }
+    }
+  }
+
+  @Override
+  public void stop() {
+    terminate = true;
+    synchronized (queue) {
+      queue.notify();
+    }
+
+  }
+
+}
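
For illustration only (not part of this commit): the JobStatusPoller above reports remote status transitions through the JobListener interface added earlier in this diff. A listener that merely logs those callbacks might look roughly like this sketch; the package and the class name LoggingJobListener are made up for the example.

package org.apache.zeppelin.example;

import org.apache.zeppelin.scheduler.Job;
import org.apache.zeppelin.scheduler.JobListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingJobListener implements JobListener {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingJobListener.class);

  @Override
  public void onProgressUpdate(Job job, int progress) {
    LOG.info("{} progress: {}%", job.getId(), progress);
  }

  @Override
  public void beforeStatusChange(Job job, Job.Status before, Job.Status after) {
    // called just before Job.setStatus() applies the new status
  }

  @Override
  public void afterStatusChange(Job job, Job.Status before, Job.Status after) {
    // the poller may pass before == null when only the new status is known
    LOG.info("{}: {} -> {}", job.getId(), before, after);
  }
}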

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Scheduler.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Scheduler.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Scheduler.java
new file mode 100644
index 0000000..a886c22
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/Scheduler.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import java.util.Collection;
+
+/**
+ * Common interface for job schedulers: submit jobs and inspect waiting and running jobs.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public interface Scheduler extends Runnable {
+  public String getName();
+
+  public Collection<Job> getJobsWaiting();
+
+  public Collection<Job> getJobsRunning();
+
+  public void submit(Job job);
+
+  public void stop();
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerFactory.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerFactory.java
new file mode 100644
index 0000000..2556a81
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerFactory.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Factory that creates and caches named Scheduler instances backed by a shared thread pool.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class SchedulerFactory implements SchedulerListener {
+  private final Logger logger = LoggerFactory.getLogger(SchedulerFactory.class);
+  ScheduledExecutorService executor;
+  Map<String, Scheduler> schedulers = new LinkedHashMap<String, Scheduler>();
+
+  private static SchedulerFactory singleton;
+  private static Long singletonLock = new Long(0);
+
+  public static SchedulerFactory singleton() {
+    if (singleton == null) {
+      synchronized (singletonLock) {
+        if (singleton == null) {
+          try {
+            singleton = new SchedulerFactory();
+          } catch (Exception e) {
+            e.printStackTrace();
+          }
+        }
+      }
+    }
+    return singleton;
+  }
+
+  public SchedulerFactory() throws Exception {
+    executor = Executors.newScheduledThreadPool(100);
+  }
+
+  public void destroy() {
+    executor.shutdown();
+  }
+
+  public Scheduler createOrGetFIFOScheduler(String name) {
+    synchronized (schedulers) {
+      if (schedulers.containsKey(name) == false) {
+        Scheduler s = new FIFOScheduler(name, executor, this);
+        schedulers.put(name, s);
+        executor.execute(s);
+      }
+      return schedulers.get(name);
+    }
+  }
+
+  public Scheduler createOrGetParallelScheduler(String name, int maxConcurrency) {
+    synchronized (schedulers) {
+      if (schedulers.containsKey(name) == false) {
+        Scheduler s = new ParallelScheduler(name, executor, this, maxConcurrency);
+        schedulers.put(name, s);
+        executor.execute(s);
+      }
+      return schedulers.get(name);
+    }
+  }
+
+  public Scheduler createOrGetRemoteScheduler(
+      String name,
+      RemoteInterpreterProcess interpreterProcess,
+      int maxConcurrency) {
+
+    synchronized (schedulers) {
+      if (schedulers.containsKey(name) == false) {
+        Scheduler s = new RemoteScheduler(
+            name,
+            executor,
+            interpreterProcess,
+            this,
+            maxConcurrency);
+        schedulers.put(name, s);
+        executor.execute(s);
+      }
+      return schedulers.get(name);
+    }
+  }
+
+  public Scheduler removeScheduler(String name) {
+    synchronized (schedulers) {
+      Scheduler s = schedulers.remove(name);
+      if (s != null) {
+        s.stop();
+      }
+    }
+    return null;
+  }
+
+  public Collection<Scheduler> listScheduler(String name) {
+    List<Scheduler> s = new LinkedList<Scheduler>();
+    synchronized (schedulers) {
+      for (Scheduler ss : schedulers.values()) {
+        s.add(ss);
+      }
+    }
+    return s;
+  }
+
+  @Override
+  public void jobStarted(Scheduler scheduler, Job job) {
+    logger.info("Job " + job.getJobName() + " started by scheduler " + scheduler.getName());
+
+  }
+
+  @Override
+  public void jobFinished(Scheduler scheduler, Job job) {
+    logger.info("Job " + job.getJobName() + " finished by scheduler " + scheduler.getName());
+
+  }
+
+
+
+}
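
As a usage sketch only (not part of this commit; the class, scheduler, and job names below are made up for illustration), the factory above would typically be driven through its singleton, combined with the Job constructor and abstract methods that appear in the scheduler tests elsewhere in this diff:

    import java.util.Map;

    import org.apache.zeppelin.scheduler.Job;
    import org.apache.zeppelin.scheduler.Scheduler;
    import org.apache.zeppelin.scheduler.SchedulerFactory;

    // Usage sketch only; illustrative names, not shipped with this commit.
    public class SchedulerUsageSketch {
      public static void main(String[] args) {
        // Schedulers are shared by name; repeated calls with "example" return the same instance.
        Scheduler scheduler = SchedulerFactory.singleton().createOrGetFIFOScheduler("example");

        // Work is submitted as Job subclasses; here a trivial anonymous one.
        scheduler.submit(new Job("example-job", null) {
          @Override
          public int progress() {
            return 0;
          }

          @Override
          public Map<String, Object> info() {
            return null;
          }

          @Override
          protected Object jobRun() throws Throwable {
            return "done";
          }

          @Override
          protected boolean jobAbort() {
            return false;
          }
        });
      }
    }

A scheduler that is no longer needed can be handed back with SchedulerFactory.singleton().removeScheduler("example"), which also stops it.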

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerListener.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerListener.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerListener.java
new file mode 100644
index 0000000..6fdd176
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerListener.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+/**
+ * TODO(moon) : add description.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public interface SchedulerListener {
+  public void jobStarted(Scheduler scheduler, Job job);
+
+  public void jobFinished(Scheduler scheduler, Job job);
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/thrift/RemoteInterpreterService.thrift
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/thrift/RemoteInterpreterService.thrift b/zeppelin-interpreter/src/main/thrift/RemoteInterpreterService.thrift
index bbb54b1..051730e 100644
--- a/zeppelin-interpreter/src/main/thrift/RemoteInterpreterService.thrift
+++ b/zeppelin-interpreter/src/main/thrift/RemoteInterpreterService.thrift
@@ -1,4 +1,22 @@
-namespace java com.nflabs.zeppelin.interpreter.thrift
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace java org.apache.zeppelin.interpreter.thrift
 
 
 struct RemoteInterpreterContext {
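
Since the Java classes generated from this .thrift file are produced under the namespace declared above, code that uses the generated stubs only needs an import update. A minimal, hypothetical snippet (the wrapper class is made up for illustration; Client is the generated type that RemoteInterpreterProcess and its test use):

    // Before this commit: import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
    import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;

    // Hypothetical consumer; only the package of the generated Client changes.
    public class ThriftNamespaceSketch {
      private Client client;
    }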

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/display/InputTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/display/InputTest.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/display/InputTest.java
deleted file mode 100644
index 091473b..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/display/InputTest.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.nflabs.zeppelin.display;
-
-import static org.junit.Assert.*;
-
-import java.io.IOException;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class InputTest {
-
-	@Before
-	public void setUp() throws Exception {
-	}
-
-	@After
-	public void tearDown() throws Exception {
-	}
-
-	@Test
-	public void testDefaultParamReplace() throws IOException{
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterProcessTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterProcessTest.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterProcessTest.java
deleted file mode 100644
index 181b1b0..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterProcessTest.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-
-import java.util.HashMap;
-
-import org.junit.Test;
-
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
-
-public class RemoteInterpreterProcessTest {
-
-  @Test
-  public void testStartStop() {
-    RemoteInterpreterProcess rip = new RemoteInterpreterProcess("../bin/interpreter.sh", "nonexists", new HashMap<String, String>());
-    assertFalse(rip.isRunning());
-    assertEquals(0, rip.referenceCount());
-    assertEquals(1, rip.reference());
-    assertEquals(2, rip.reference());
-    assertEquals(true, rip.isRunning());
-    assertEquals(1, rip.dereference());
-    assertEquals(true, rip.isRunning());
-    assertEquals(0, rip.dereference());
-    assertEquals(false, rip.isRunning());
-  }
-
-  @Test
-  public void testClientFactory() throws Exception {
-    RemoteInterpreterProcess rip = new RemoteInterpreterProcess("../bin/interpreter.sh", "nonexists", new HashMap<String, String>());
-    rip.reference();
-    assertEquals(0, rip.getNumActiveClient());
-    assertEquals(0, rip.getNumIdleClient());
-
-    Client client = rip.getClient();
-    assertEquals(1, rip.getNumActiveClient());
-    assertEquals(0, rip.getNumIdleClient());
-
-    rip.releaseClient(client);
-    assertEquals(0, rip.getNumActiveClient());
-    assertEquals(1, rip.getNumIdleClient());
-
-    rip.dereference();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterServerTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterServerTest.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterServerTest.java
deleted file mode 100644
index 809c76e..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterServerTest.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-
-import org.apache.thrift.TException;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class RemoteInterpreterServerTest {
-  @Before
-  public void setUp() throws Exception {
-  }
-
-  @After
-  public void tearDown() throws Exception {
-  }
-
-  @Test
-  public void testStartStop() throws InterruptedException, IOException, TException {
-    RemoteInterpreterServer server = new RemoteInterpreterServer(
-        RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces());
-    assertEquals(false, server.isRunning());
-
-    server.start();
-    long startTime = System.currentTimeMillis();
-    boolean running = false;
-
-    while (System.currentTimeMillis() - startTime < 10 * 1000) {
-      if (server.isRunning()) {
-        running = true;
-        break;
-      } else {
-        Thread.sleep(200);
-      }
-    }
-
-    assertEquals(true, running);
-    assertEquals(true, RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", server.getPort()));
-
-    server.shutdown();
-
-    while (System.currentTimeMillis() - startTime < 10 * 1000) {
-      if (server.isRunning()) {
-        Thread.sleep(200);
-      } else {
-        running = false;
-        break;
-      }
-    }
-    assertEquals(false, running);
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterTest.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterTest.java
deleted file mode 100644
index dcee6aa..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterTest.java
+++ /dev/null
@@ -1,428 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.thrift.transport.TTransportException;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.remote.mock.MockInterpreterA;
-import com.nflabs.zeppelin.interpreter.remote.mock.MockInterpreterB;
-import com.nflabs.zeppelin.scheduler.Job;
-import com.nflabs.zeppelin.scheduler.Job.Status;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-
-public class RemoteInterpreterTest {
-
-
-  private InterpreterGroup intpGroup;
-  private HashMap<String, String> env;
-
-  @Before
-  public void setUp() throws Exception {
-    intpGroup = new InterpreterGroup();
-    env = new HashMap<String, String>();
-    env.put("ZEPPELIN_CLASSPATH", new File("./target/test-classes").getAbsolutePath());
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    intpGroup.clone();
-    intpGroup.destroy();
-  }
-
-  @Test
-  public void testRemoteInterperterCall() throws TTransportException, IOException {
-    Properties p = new Properties();
-
-    RemoteInterpreter intpA = new RemoteInterpreter(
-        p,
-        MockInterpreterA.class.getName(),
-        new File("../bin/interpreter.sh").getAbsolutePath(),
-        "fake",
-        env
-        );
-
-    intpGroup.add(intpA);
-    intpA.setInterpreterGroup(intpGroup);
-
-    RemoteInterpreter intpB = new RemoteInterpreter(
-        p,
-        MockInterpreterB.class.getName(),
-        new File("../bin/interpreter.sh").getAbsolutePath(),
-        "fake",
-        env
-        );
-
-    intpGroup.add(intpB);
-    intpB.setInterpreterGroup(intpGroup);
-
-
-    RemoteInterpreterProcess process = intpA.getInterpreterProcess();
-    process.equals(intpB.getInterpreterProcess());
-
-    assertFalse(process.isRunning());
-    assertEquals(0, process.getNumIdleClient());
-    assertEquals(0, process.referenceCount());
-
-    intpA.open();
-    assertTrue(process.isRunning());
-    assertEquals(1, process.getNumIdleClient());
-    assertEquals(1, process.referenceCount());
-
-    intpA.interpret("1",
-        new InterpreterContext(
-            "id",
-            "title",
-            "text",
-            new HashMap<String, Object>(),
-            new GUI()));
-
-    intpB.open();
-    assertEquals(2, process.referenceCount());
-
-    intpA.close();
-    assertEquals(1, process.referenceCount());
-    intpB.close();
-    assertEquals(0, process.referenceCount());
-
-    assertFalse(process.isRunning());
-
-  }
-
-  @Test
-  public void testRemoteSchedulerSharing() throws TTransportException, IOException {
-    Properties p = new Properties();
-
-    RemoteInterpreter intpA = new RemoteInterpreter(
-        p,
-        MockInterpreterA.class.getName(),
-        new File("../bin/interpreter.sh").getAbsolutePath(),
-        "fake",
-        env
-        );
-
-    intpGroup.add(intpA);
-    intpA.setInterpreterGroup(intpGroup);
-
-    RemoteInterpreter intpB = new RemoteInterpreter(
-        p,
-        MockInterpreterB.class.getName(),
-        new File("../bin/interpreter.sh").getAbsolutePath(),
-        "fake",
-        env
-        );
-
-    intpGroup.add(intpB);
-    intpB.setInterpreterGroup(intpGroup);
-
-    intpA.open();
-    intpB.open();
-
-    long start = System.currentTimeMillis();
-    InterpreterResult ret = intpA.interpret("500",
-        new InterpreterContext(
-            "id",
-            "title",
-            "text",
-            new HashMap<String, Object>(),
-            new GUI()));
-    assertEquals("500", ret.message());
-
-    ret = intpB.interpret("500",
-        new InterpreterContext(
-            "id",
-            "title",
-            "text",
-            new HashMap<String, Object>(),
-            new GUI()));
-    assertEquals("1000", ret.message());
-    long end = System.currentTimeMillis();
-    assertTrue(end - start >= 1000);
-
-
-    intpA.close();
-    intpB.close();
-
-    RemoteInterpreterProcess process = intpA.getInterpreterProcess();
-    assertFalse(process.isRunning());
-  }
-
-  @Test
-  public void testRemoteSchedulerSharingSubmit() throws TTransportException, IOException, InterruptedException {
-    Properties p = new Properties();
-
-    final RemoteInterpreter intpA = new RemoteInterpreter(
-        p,
-        MockInterpreterA.class.getName(),
-        new File("../bin/interpreter.sh").getAbsolutePath(),
-        "fake",
-        env
-        );
-
-    intpGroup.add(intpA);
-    intpA.setInterpreterGroup(intpGroup);
-
-    final RemoteInterpreter intpB = new RemoteInterpreter(
-        p,
-        MockInterpreterB.class.getName(),
-        new File("../bin/interpreter.sh").getAbsolutePath(),
-        "fake",
-        env
-        );
-
-    intpGroup.add(intpB);
-    intpB.setInterpreterGroup(intpGroup);
-
-    intpA.open();
-    intpB.open();
-
-    long start = System.currentTimeMillis();
-    Job jobA = new Job("jobA", null) {
-
-      @Override
-      public int progress() {
-        return 0;
-      }
-
-      @Override
-      public Map<String, Object> info() {
-        return null;
-      }
-
-      @Override
-      protected Object jobRun() throws Throwable {
-        return intpA.interpret("500",
-            new InterpreterContext(
-                "jobA",
-                "title",
-                "text",
-                new HashMap<String, Object>(),
-                new GUI()));
-      }
-
-      @Override
-      protected boolean jobAbort() {
-        return false;
-      }
-
-    };
-    intpA.getScheduler().submit(jobA);
-
-    Job jobB = new Job("jobB", null) {
-
-      @Override
-      public int progress() {
-        return 0;
-      }
-
-      @Override
-      public Map<String, Object> info() {
-        return null;
-      }
-
-      @Override
-      protected Object jobRun() throws Throwable {
-        return intpB.interpret("500",
-            new InterpreterContext(
-                "jobB",
-                "title",
-                "text",
-                new HashMap<String, Object>(),
-                new GUI()));
-      }
-
-      @Override
-      protected boolean jobAbort() {
-        return false;
-      }
-
-    };
-    intpB.getScheduler().submit(jobB);
-
-    // wait until both job finished
-    while (jobA.getStatus() != Status.FINISHED ||
-           jobB.getStatus() != Status.FINISHED) {
-      Thread.sleep(100);
-    }
-
-    long end = System.currentTimeMillis();
-    assertTrue(end - start >= 1000);
-
-    assertEquals("1000", ((InterpreterResult) jobB.getReturn()).message());
-
-    intpA.close();
-    intpB.close();
-
-    RemoteInterpreterProcess process = intpA.getInterpreterProcess();
-    assertFalse(process.isRunning());
-  }
-
-  @Test
-  public void testRunOrderPreserved() throws InterruptedException {
-    Properties p = new Properties();
-
-    final RemoteInterpreter intpA = new RemoteInterpreter(
-        p,
-        MockInterpreterA.class.getName(),
-        new File("../bin/interpreter.sh").getAbsolutePath(),
-        "fake",
-        env
-        );
-
-    intpGroup.add(intpA);
-    intpA.setInterpreterGroup(intpGroup);
-
-    intpA.open();
-
-    int concurrency = 3;
-    final List<String> results = new LinkedList<String>();
-
-    Scheduler scheduler = intpA.getScheduler();
-    for (int i = 0; i < concurrency; i++) {
-      final String jobId = Integer.toString(i);
-      scheduler.submit(new Job(jobId, Integer.toString(i), null, 200) {
-
-        @Override
-        public int progress() {
-          return 0;
-        }
-
-        @Override
-        public Map<String, Object> info() {
-          return null;
-        }
-
-        @Override
-        protected Object jobRun() throws Throwable {
-          InterpreterResult ret = intpA.interpret(getJobName(), new InterpreterContext(
-              jobId,
-              "title",
-              "text",
-              new HashMap<String, Object>(),
-              new GUI()));
-
-          synchronized (results) {
-            results.add(ret.message());
-            results.notify();
-          }
-          return null;
-        }
-
-        @Override
-        protected boolean jobAbort() {
-          return false;
-        }
-
-      });
-    }
-
-    // wait for job finished
-    synchronized (results) {
-      while (results.size() != concurrency) {
-        results.wait(300);
-      }
-    }
-
-    int i = 0;
-    for (String result : results) {
-      assertEquals(Integer.toString(i++), result);
-    }
-    assertEquals(concurrency, i);
-
-    intpA.close();
-  }
-
-
-  @Test
-  public void testRunParallel() throws InterruptedException {
-    Properties p = new Properties();
-    p.put("parallel", "true");
-
-    final RemoteInterpreter intpA = new RemoteInterpreter(
-        p,
-        MockInterpreterA.class.getName(),
-        new File("../bin/interpreter.sh").getAbsolutePath(),
-        "fake",
-        env
-        );
-
-    intpGroup.add(intpA);
-    intpA.setInterpreterGroup(intpGroup);
-
-    intpA.open();
-
-    int concurrency = 4;
-    final int timeToSleep = 1000;
-    final List<String> results = new LinkedList<String>();
-    long start = System.currentTimeMillis();
-
-    Scheduler scheduler = intpA.getScheduler();
-    for (int i = 0; i < concurrency; i++) {
-      final String jobId = Integer.toString(i);
-      scheduler.submit(new Job(jobId, Integer.toString(i), null, 300) {
-
-        @Override
-        public int progress() {
-          return 0;
-        }
-
-        @Override
-        public Map<String, Object> info() {
-          return null;
-        }
-
-        @Override
-        protected Object jobRun() throws Throwable {
-          String stmt = Integer.toString(timeToSleep);
-          InterpreterResult ret = intpA.interpret(stmt, new InterpreterContext(
-              jobId,
-              "title",
-              "text",
-              new HashMap<String, Object>(),
-              new GUI()));
-
-          synchronized (results) {
-            results.add(ret.message());
-            results.notify();
-          }
-          return stmt;
-        }
-
-        @Override
-        protected boolean jobAbort() {
-          return false;
-        }
-
-      });
-    }
-
-    // wait for job finished
-    synchronized (results) {
-      while (results.size() != concurrency) {
-        results.wait(300);
-      }
-    }
-
-    long end = System.currentTimeMillis();
-
-    assertTrue(end - start < timeToSleep * concurrency);
-
-    intpA.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterUtilsTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterUtilsTest.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterUtilsTest.java
deleted file mode 100644
index 3035cf2..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterUtilsTest.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote;
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-
-import org.junit.Test;
-
-public class RemoteInterpreterUtilsTest {
-
-  @Test
-  public void testFindRandomAvailablePortOnAllLocalInterfaces() throws IOException {
-    assertTrue(RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces() > 0);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/mock/MockInterpreterA.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/mock/MockInterpreterA.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/mock/MockInterpreterA.java
deleted file mode 100644
index 1df3979..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/mock/MockInterpreterA.java
+++ /dev/null
@@ -1,84 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote.mock;
-
-import java.util.List;
-import java.util.Properties;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import com.nflabs.zeppelin.interpreter.InterpreterPropertyBuilder;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-public class MockInterpreterA extends Interpreter {
-  static {
-    Interpreter.register(
-        "interpreterA",
-        "group1",
-        MockInterpreterA.class.getName(),
-        new InterpreterPropertyBuilder()
-            .add("p1", "v1", "property1").build());
-
-  }
-
-  private String lastSt;
-
-  public MockInterpreterA(Properties property) {
-    super(property);
-  }
-
-  @Override
-  public void open() {
-    //new RuntimeException().printStackTrace();
-  }
-
-  @Override
-  public void close() {
-  }
-
-  public String getLastStatement() {
-    return lastSt;
-  }
-
-  @Override
-  public InterpreterResult interpret(String st, InterpreterContext context) {
-    try {
-      Thread.sleep(Long.parseLong(st));
-      this.lastSt = st;
-    } catch (NumberFormatException | InterruptedException e) {
-      throw new InterpreterException(e);
-    }
-    return new InterpreterResult(Code.SUCCESS, st);
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {
-
-  }
-
-  @Override
-  public FormType getFormType() {
-    return FormType.NATIVE;
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    return 0;
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    return null;
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    if (getProperty("parallel") != null && getProperty("parallel").equals("true")) {
-      return SchedulerFactory.singleton().createOrGetParallelScheduler("interpreter_" + this.hashCode(), 10);
-    } else {
-      return SchedulerFactory.singleton().createOrGetFIFOScheduler("interpreter_" + this.hashCode());
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/mock/MockInterpreterB.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/mock/MockInterpreterB.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/mock/MockInterpreterB.java
deleted file mode 100644
index 39f2ab8..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/interpreter/remote/mock/MockInterpreterB.java
+++ /dev/null
@@ -1,101 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote.mock;
-
-import java.util.List;
-import java.util.Properties;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterPropertyBuilder;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-import com.nflabs.zeppelin.interpreter.WrappedInterpreter;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-
-public class MockInterpreterB extends Interpreter {
-  static {
-    Interpreter.register(
-        "interpreterB",
-        "group1",
-        MockInterpreterA.class.getName(),
-        new InterpreterPropertyBuilder()
-            .add("p1", "v1", "property1").build());
-
-  }
-  public MockInterpreterB(Properties property) {
-    super(property);
-  }
-
-  @Override
-  public void open() {
-    //new RuntimeException().printStackTrace();
-  }
-
-  @Override
-  public void close() {
-  }
-
-  @Override
-  public InterpreterResult interpret(String st, InterpreterContext context) {
-    MockInterpreterA intpA = getInterpreterA();
-    String intpASt = intpA.getLastStatement();
-    long timeToSleep = Long.parseLong(st);
-    if (intpASt != null) {
-      timeToSleep += Long.parseLong(intpASt);
-    }
-    try {
-      Thread.sleep(timeToSleep);
-    } catch (NumberFormatException | InterruptedException e) {
-      throw new InterpreterException(e);
-    }
-    return new InterpreterResult(Code.SUCCESS, Long.toString(timeToSleep));
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {
-
-  }
-
-  @Override
-  public FormType getFormType() {
-    return FormType.NATIVE;
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    return 0;
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    return null;
-  }
-
-  public MockInterpreterA getInterpreterA() {
-    InterpreterGroup interpreterGroup = getInterpreterGroup();
-    for (Interpreter intp : interpreterGroup) {
-      if (intp.getClassName().equals(MockInterpreterA.class.getName())) {
-        Interpreter p = intp;
-        while (p instanceof WrappedInterpreter) {
-          p = ((WrappedInterpreter) p).getInnerInterpreter();
-        }
-        return (MockInterpreterA) p;
-      }
-    }
-    return null;
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    InterpreterGroup interpreterGroup = getInterpreterGroup();
-    for (Interpreter intp : interpreterGroup) {
-      if (intp.getClassName().equals(MockInterpreterA.class.getName())) {
-        return intp.getScheduler();
-      }
-    }
-
-    return null;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/FIFOSchedulerTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/FIFOSchedulerTest.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/FIFOSchedulerTest.java
deleted file mode 100644
index 37a29d1..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/FIFOSchedulerTest.java
+++ /dev/null
@@ -1,74 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import junit.framework.TestCase;
-
-import com.nflabs.zeppelin.scheduler.Job.Status;
-
-public class FIFOSchedulerTest extends TestCase {
-
-	private SchedulerFactory schedulerSvc;
-
-	@Override
-  public void setUp() throws Exception{
-		schedulerSvc = new SchedulerFactory();
-	}
-
-	@Override
-  public void tearDown(){
-
-	}
-
-	public void testRun() throws InterruptedException{
-		Scheduler s = schedulerSvc.createOrGetFIFOScheduler("test");
-		assertEquals(0, s.getJobsRunning().size());
-		assertEquals(0, s.getJobsWaiting().size());
-
-		Job job1 = new SleepingJob("job1", null, 500);
-		Job job2 = new SleepingJob("job2", null, 500);
-
-		s.submit(job1);
-		s.submit(job2);
-		Thread.sleep(200);
-
-		assertEquals(Status.RUNNING, job1.getStatus());
-		assertEquals(Status.PENDING, job2.getStatus());
-		assertEquals(1, s.getJobsRunning().size());
-		assertEquals(1, s.getJobsWaiting().size());
-
-
-		Thread.sleep(500);
-		assertEquals(Status.FINISHED, job1.getStatus());
-		assertEquals(Status.RUNNING, job2.getStatus());
-		assertTrue((500 < (Long)job1.getReturn()));
-		assertEquals(1, s.getJobsRunning().size());
-		assertEquals(0, s.getJobsWaiting().size());
-
-	}
-
-	public void testAbort() throws InterruptedException{
-		Scheduler s = schedulerSvc.createOrGetFIFOScheduler("test");
-		assertEquals(0, s.getJobsRunning().size());
-		assertEquals(0, s.getJobsWaiting().size());
-
-		Job job1 = new SleepingJob("job1", null, 500);
-		Job job2 = new SleepingJob("job2", null, 500);
-
-		s.submit(job1);
-		s.submit(job2);
-
-		Thread.sleep(200);
-
-		job1.abort();
-		job2.abort();
-
-		Thread.sleep(200);
-
-		assertEquals(Status.ABORT, job1.getStatus());
-		assertEquals(Status.ABORT, job2.getStatus());
-
-		assertTrue((500 > (Long)job1.getReturn()));
-		assertEquals(null, job2.getReturn());
-
-
-	}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/ParallelSchedulerTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/ParallelSchedulerTest.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/ParallelSchedulerTest.java
deleted file mode 100644
index f88de4c..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/ParallelSchedulerTest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-
-import com.nflabs.zeppelin.scheduler.Job.Status;
-
-import junit.framework.TestCase;
-public class ParallelSchedulerTest extends TestCase {
-
-	private SchedulerFactory schedulerSvc;
-
-	public void setUp() throws Exception{
-		schedulerSvc = new SchedulerFactory();
-	}
-	
-	public void tearDown(){
-		
-	}
-	
-	public void testRun() throws InterruptedException{
-		Scheduler s = schedulerSvc.createOrGetParallelScheduler("test", 2);
-		assertEquals(0, s.getJobsRunning().size());
-		assertEquals(0, s.getJobsWaiting().size());
-		
-		Job job1 = new SleepingJob("job1", null, 500);
-		Job job2 = new SleepingJob("job2", null, 500);
-		Job job3 = new SleepingJob("job3", null, 500);		
-		
-		s.submit(job1);
-		s.submit(job2);
-		s.submit(job3);
-		Thread.sleep(200);
-
-		assertEquals(Status.RUNNING, job1.getStatus());
-		assertEquals(Status.RUNNING, job2.getStatus());
-		assertEquals(Status.PENDING, job3.getStatus());
-		assertEquals(2, s.getJobsRunning().size());
-		assertEquals(1, s.getJobsWaiting().size());
-		
-		Thread.sleep(500);
-		
-		assertEquals(Status.FINISHED, job1.getStatus());
-		assertEquals(Status.FINISHED, job2.getStatus());
-		assertEquals(Status.RUNNING, job3.getStatus());
-		assertEquals(1, s.getJobsRunning().size());
-		assertEquals(0, s.getJobsWaiting().size());
-
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/RemoteSchedulerTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/RemoteSchedulerTest.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/RemoteSchedulerTest.java
deleted file mode 100644
index 35aa1d3..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/RemoteSchedulerTest.java
+++ /dev/null
@@ -1,105 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.remote.RemoteInterpreter;
-import com.nflabs.zeppelin.interpreter.remote.mock.MockInterpreterA;
-
-public class RemoteSchedulerTest {
-
-  private SchedulerFactory schedulerSvc;
-
-  @Before
-  public void setUp() throws Exception{
-    schedulerSvc = new SchedulerFactory();
-  }
-
-  @After
-  public void tearDown(){
-
-  }
-
-  @Test
-  public void test() throws Exception {
-    Properties p = new Properties();
-    InterpreterGroup intpGroup = new InterpreterGroup();
-    Map<String, String> env = new HashMap<String, String>();
-    env.put("ZEPPELIN_CLASSPATH", new File("./target/test-classes").getAbsolutePath());
-
-    final RemoteInterpreter intpA = new RemoteInterpreter(
-        p,
-        MockInterpreterA.class.getName(),
-        new File("../bin/interpreter.sh").getAbsolutePath(),
-        "fake",
-        env
-        );
-
-    intpGroup.add(intpA);
-    intpA.setInterpreterGroup(intpGroup);
-
-    intpA.open();
-
-    Scheduler scheduler = schedulerSvc.createOrGetRemoteScheduler("test",
-        intpA.getInterpreterProcess(),
-        10);
-
-    Job job = new Job("jobId", "jobName", null, 200) {
-
-      @Override
-      public int progress() {
-        return 0;
-      }
-
-      @Override
-      public Map<String, Object> info() {
-        return null;
-      }
-
-      @Override
-      protected Object jobRun() throws Throwable {
-        intpA.interpret("1000", new InterpreterContext(
-            "jobId",
-            "title",
-            "text",
-            new HashMap<String, Object>(),
-            new GUI()));
-        return "1000";
-      }
-
-      @Override
-      protected boolean jobAbort() {
-        return false;
-      }
-    };
-    scheduler.submit(job);
-
-    while (job.isRunning() == false) {
-      Thread.sleep(100);
-    }
-
-    Thread.sleep(500);
-    assertEquals(0, scheduler.getJobsWaiting().size());
-    assertEquals(1, scheduler.getJobsRunning().size());
-
-    Thread.sleep(500);
-
-    assertEquals(0, scheduler.getJobsWaiting().size());
-    assertEquals(0, scheduler.getJobsRunning().size());
-
-    intpA.close();
-    schedulerSvc.removeScheduler("test");
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/SleepingJob.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/SleepingJob.java b/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/SleepingJob.java
deleted file mode 100644
index 42d0316..0000000
--- a/zeppelin-interpreter/src/test/java/com/nflabs/zeppelin/scheduler/SleepingJob.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class SleepingJob extends Job{
-	
-	private int time;
-	boolean abort = false;
-	private long start;
-	private int count;
-	
-	
-	public SleepingJob(String jobName, JobListener listener, int time){
-		super(jobName, listener);
-		this.time = time;
-		count = 0;
-	}
-	public Object jobRun() {
-		start = System.currentTimeMillis();
-		while(abort==false){
-			count++;
-			try {
-				Thread.sleep(10);
-			} catch (InterruptedException e) {
-			}
-			if(System.currentTimeMillis() - start>time) break;
-		}
-		return System.currentTimeMillis()-start;
-	}
-
-	public boolean jobAbort() {
-		abort = true;
-		return true;
-	}
-
-	public int progress() {
-		long p = (System.currentTimeMillis() - start)*100 / time;
-		if(p<0) p = 0;
-		if(p>100) p = 100;
-		return (int) p;
-	}
-
-	public Map<String, Object> info() {
-		Map<String, Object> i = new HashMap<String, Object>();
-		i.put("LoopCount", Integer.toString(count));
-		return i;
-	}
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/display/InputTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/display/InputTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/display/InputTest.java
new file mode 100644
index 0000000..626ae99
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/display/InputTest.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.display;
+
+import java.io.IOException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class InputTest {
+
+	@Before
+	public void setUp() throws Exception {
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void testDefaultParamReplace() throws IOException{
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcessTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcessTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcessTest.java
new file mode 100644
index 0000000..02dc224
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterProcessTest.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+import java.util.HashMap;
+
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
+import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
+import org.junit.Test;
+
+public class RemoteInterpreterProcessTest {
+
+  @Test
+  public void testStartStop() {
+    RemoteInterpreterProcess rip = new RemoteInterpreterProcess("../bin/interpreter.sh", "nonexists", new HashMap<String, String>());
+    assertFalse(rip.isRunning());
+    assertEquals(0, rip.referenceCount());
+    assertEquals(1, rip.reference());
+    assertEquals(2, rip.reference());
+    assertEquals(true, rip.isRunning());
+    assertEquals(1, rip.dereference());
+    assertEquals(true, rip.isRunning());
+    assertEquals(0, rip.dereference());
+    assertEquals(false, rip.isRunning());
+  }
+
+  @Test
+  public void testClientFactory() throws Exception {
+    RemoteInterpreterProcess rip = new RemoteInterpreterProcess("../bin/interpreter.sh", "nonexists", new HashMap<String, String>());
+    rip.reference();
+    assertEquals(0, rip.getNumActiveClient());
+    assertEquals(0, rip.getNumIdleClient());
+
+    Client client = rip.getClient();
+    assertEquals(1, rip.getNumActiveClient());
+    assertEquals(0, rip.getNumIdleClient());
+
+    rip.releaseClient(client);
+    assertEquals(0, rip.getNumActiveClient());
+    assertEquals(1, rip.getNumIdleClient());
+
+    rip.dereference();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServerTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServerTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServerTest.java
new file mode 100644
index 0000000..af6b4bd
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServerTest.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+import org.apache.thrift.TException;
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer;
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreterUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class RemoteInterpreterServerTest {
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  @Test
+  public void testStartStop() throws InterruptedException, IOException, TException {
+    RemoteInterpreterServer server = new RemoteInterpreterServer(
+        RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces());
+    assertEquals(false, server.isRunning());
+
+    server.start();
+    long startTime = System.currentTimeMillis();
+    boolean running = false;
+
+    while (System.currentTimeMillis() - startTime < 10 * 1000) {
+      if (server.isRunning()) {
+        running = true;
+        break;
+      } else {
+        Thread.sleep(200);
+      }
+    }
+
+    assertEquals(true, running);
+    assertEquals(true, RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", server.getPort()));
+
+    server.shutdown();
+
+    while (System.currentTimeMillis() - startTime < 10 * 1000) {
+      if (server.isRunning()) {
+        Thread.sleep(200);
+      } else {
+        running = false;
+        break;
+      }
+    }
+    assertEquals(false, running);
+  }
+
+
+}


[17/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
Rename package/groupId to org.apache and apply rat plugin.

This PR handles https://issues.apache.org/jira/browse/ZEPPELIN-12.

* The groupId in the pom.xml files is changed from com.nflabs.zeppelin to org.apache.zeppelin.
* The package name is changed from com.nflabs.zeppelin to org.apache.zeppelin (a minimal illustration of the import change follows this list).
* The apache-rat plugin is applied (a license header is added to every source file) and NOTICE is updated (https://www.apache.org/legal/src-headers.html).
* The Sphinx documentation is removed because it was outdated (it covered 0.3.0).
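
To make the rename concrete for code that builds against Zeppelin, here is a minimal, hypothetical snippet (not part of this commit; the helper class is made up for illustration). Only the package prefix changes; class names and constructors stay the same:

    // Before this commit: import com.nflabs.zeppelin.interpreter.InterpreterResult;
    import org.apache.zeppelin.interpreter.InterpreterResult;
    import org.apache.zeppelin.interpreter.InterpreterResult.Code;

    // Hypothetical helper; builds a successful result the way the mock interpreters in this diff do.
    public class RenameExample {
      public static InterpreterResult success(String message) {
        return new InterpreterResult(Code.SUCCESS, message);
      }
    }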

Please review the changes. In particular, check the NOTICE file for anything I may have missed.

Author: Lee moon soo <mo...@apache.org>

Closes #13 from Leemoonsoo/rat and squashes the following commits:

892695f [Lee moon soo] hive interpreter module com.nflabs -> org.apache. Add license to the hive/pom.xml
c9a07c9 [Lee moon soo] Use correct package name
06a802a [Lee moon soo] One file is missed while renaming it
9902997 [Lee moon soo] Add missing import
643530a [Lee moon soo] Exclude .log from rat
fb15d0b [Lee moon soo] Exclude dependency-reduced-pom.xml from rat plugin
5faf7b1 [Lee moon soo] Apply rat plugin and com.nflabs -> org.apache
5edc77b [Lee moon soo] Update license of ScreenCaptureHtmlUnitDriver.java
1bfef1f [Lee moon soo] Update notice file
d7172fe [Lee moon soo] Add source file license header
92eb87f [Lee moon soo] Remove old sphinx doc
be06c43 [Lee moon soo] Remove unused erb
1ffca75 [Lee moon soo] Remove unused file


Project: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/commit/669d408d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/tree/669d408d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/diff/669d408d

Branch: refs/heads/master
Commit: 669d408dc93226b3d3d1e9271f17eed09ad7d158
Parents: 7a60b33
Author: Lee moon soo <mo...@apache.org>
Authored: Mon Apr 6 12:45:40 2015 +0900
Committer: Lee moon soo <mo...@apache.org>
Committed: Mon Apr 6 13:05:04 2015 +0900

----------------------------------------------------------------------
 .travis.yml                                     |   15 +
 NOTICE                                          |   56 +-
 _tools/checkstyle.xml                           |   15 +
 bin/common.sh                                   |   17 +-
 bin/interpreter.sh                              |   18 +-
 bin/zeppelin-daemon.sh                          |    2 +-
 bin/zeppelin.sh                                 |    2 +-
 conf/log4j.properties                           |   17 +
 conf/zeppelin-env.sh.template                   |   16 +
 conf/zeppelin-site.xml.template                 |    2 +-
 hive/pom.xml                                    |   21 +-
 .../apache/zeppelin/hive/HiveInterpreter.java   |    8 +-
 .../zeppelin/hive/HiveInterpreterTest.java      |    4 +-
 markdown/pom.xml                                |   23 +-
 .../com/nflabs/zeppelin/markdown/Markdown.java  |   77 -
 .../org/apache/zeppelin/markdown/Markdown.java  |   93 +
 .../nflabs/zeppelin/markdown/MarkdownTest.java  |   32 -
 .../apache/zeppelin/markdown/MarkdownTest.java  |   49 +
 pom.xml                                         |   81 +-
 shell/pom.xml                                   |   23 +-
 .../nflabs/zeppelin/shell/ShellInterpreter.java |   97 -
 .../apache/zeppelin/shell/ShellInterpreter.java |  113 +
 spark/pom.xml                                   |   44 +-
 .../nflabs/zeppelin/spark/DepInterpreter.java   |  273 -
 .../zeppelin/spark/PySparkInterpreter.java      |  406 -
 .../nflabs/zeppelin/spark/SparkInterpreter.java |  718 --
 .../zeppelin/spark/SparkSqlInterpreter.java     |  339 -
 .../nflabs/zeppelin/spark/ZeppelinContext.java  |  238 -
 .../com/nflabs/zeppelin/spark/dep/Booter.java   |   53 -
 .../nflabs/zeppelin/spark/dep/Dependency.java   |   73 -
 .../zeppelin/spark/dep/DependencyContext.java   |  134 -
 .../zeppelin/spark/dep/DependencyResolver.java  |  333 -
 .../nflabs/zeppelin/spark/dep/Repository.java   |   37 -
 .../zeppelin/spark/dep/RepositoryListener.java  |   87 -
 .../spark/dep/RepositorySystemFactory.java      |   52 -
 .../zeppelin/spark/dep/TransferListener.java    |  130 -
 .../apache/zeppelin/spark/DepInterpreter.java   |  289 +
 .../zeppelin/spark/PySparkInterpreter.java      |  422 +
 .../apache/zeppelin/spark/SparkInterpreter.java |  741 ++
 .../zeppelin/spark/SparkSqlInterpreter.java     |  362 +
 .../apache/zeppelin/spark/ZeppelinContext.java  |  254 +
 .../org/apache/zeppelin/spark/dep/Booter.java   |   70 +
 .../apache/zeppelin/spark/dep/Dependency.java   |   90 +
 .../zeppelin/spark/dep/DependencyContext.java   |  151 +
 .../zeppelin/spark/dep/DependencyResolver.java  |  350 +
 .../apache/zeppelin/spark/dep/Repository.java   |   54 +
 .../zeppelin/spark/dep/RepositoryListener.java  |  121 +
 .../spark/dep/RepositorySystemFactory.java      |   69 +
 .../zeppelin/spark/dep/TransferListener.java    |  148 +
 .../main/resources/python/zeppelin_pyspark.py   |   17 +
 .../zeppelin/spark/ReflectSparkILoop.scala      |   54 -
 .../zeppelin/spark/ReflectSparkIMain.scala      |   17 -
 .../com/nflabs/zeppelin/spark/SparkRepl.scala   |   72 -
 .../zeppelin/spark/DepInterpreterTest.java      |   73 -
 .../zeppelin/spark/SparkInterpreterTest.java    |  118 -
 .../zeppelin/spark/SparkSqlInterpreterTest.java |   94 -
 .../spark/dep/DependencyResolverTest.java       |   34 -
 .../zeppelin/spark/DepInterpreterTest.java      |   91 +
 .../zeppelin/spark/SparkInterpreterTest.java    |  135 +
 .../zeppelin/spark/SparkSqlInterpreterTest.java |  112 +
 .../spark/dep/DependencyResolverTest.java       |   52 +
 testing/startSparkCluster.sh                    |   17 +
 testing/stopSparkCluster.sh                     |   19 +-
 zeppelin-distribution/README.md                 |   17 +
 zeppelin-distribution/build-infrastructure.md   |   17 +
 zeppelin-distribution/pom.xml                   |   19 +-
 .../src/assemble/distribution.xml               |   23 +-
 zeppelin-distribution/src/deb/control/control   |   19 +
 zeppelin-distribution/src/deb/control/prerm     |   20 +-
 zeppelin-distribution/src/deb/init.d/zeppelind  |   18 +
 zeppelin-docs/Makefile                          |  174 -
 zeppelin-docs/README.md                         |   24 -
 zeppelin-docs/pom.xml                           |   87 -
 zeppelin-docs/src/main/sphinx/index.rst         |   21 -
 zeppelin-docs/src/main/sphinx/installation.rst  |    8 -
 .../src/main/sphinx/installation/install.rst    |  101 -
 .../main/sphinx/templates/zeppelin/layout.html  |   38 -
 .../templates/zeppelin/static/alert_info_32.png |  Bin 1530 -> 0 bytes
 .../zeppelin/static/alert_warning_32.png        |  Bin 974 -> 0 bytes
 .../sphinx/templates/zeppelin/static/bullet.png |  Bin 109 -> 0 bytes
 .../templates/zeppelin/static/zeppelin.css_t    |  132 -
 .../main/sphinx/templates/zeppelin/theme.conf   |   26 -
 zeppelin-interpreter/pom.xml                    |   21 +-
 .../java/com/nflabs/zeppelin/display/GUI.java   |   68 -
 .../java/com/nflabs/zeppelin/display/Input.java |  458 -
 .../interpreter/ClassloaderInterpreter.java     |  261 -
 .../zeppelin/interpreter/Interpreter.java       |  267 -
 .../interpreter/InterpreterContext.java         |   51 -
 .../interpreter/InterpreterException.java       |   17 -
 .../zeppelin/interpreter/InterpreterGroup.java  |   48 -
 .../interpreter/InterpreterProperty.java        |   32 -
 .../interpreter/InterpreterPropertyBuilder.java |   20 -
 .../zeppelin/interpreter/InterpreterResult.java |  120 -
 .../zeppelin/interpreter/InterpreterUtils.java  |   41 -
 .../interpreter/LazyOpenInterpreter.java        |  131 -
 .../interpreter/WrappedInterpreter.java         |    8 -
 .../interpreter/remote/ClientFactory.java       |   63 -
 .../interpreter/remote/RemoteInterpreter.java   |  330 -
 .../remote/RemoteInterpreterProcess.java        |  192 -
 .../remote/RemoteInterpreterServer.java         |  325 -
 .../remote/RemoteInterpreterUtils.java          |   32 -
 .../thrift/RemoteInterpreterContext.java        |  786 --
 .../thrift/RemoteInterpreterResult.java         |  786 --
 .../thrift/RemoteInterpreterService.java        | 8174 ------------------
 .../zeppelin/scheduler/FIFOScheduler.java       |  134 -
 .../java/com/nflabs/zeppelin/scheduler/Job.java |  246 -
 .../nflabs/zeppelin/scheduler/JobListener.java  |   15 -
 .../zeppelin/scheduler/JobProgressPoller.java   |   53 -
 .../zeppelin/scheduler/ParallelScheduler.java   |  162 -
 .../zeppelin/scheduler/RemoteScheduler.java     |  357 -
 .../nflabs/zeppelin/scheduler/Scheduler.java    |   21 -
 .../zeppelin/scheduler/SchedulerFactory.java    |  129 -
 .../zeppelin/scheduler/SchedulerListener.java   |   13 -
 .../java/org/apache/zeppelin/display/GUI.java   |   85 +
 .../java/org/apache/zeppelin/display/Input.java |  476 +
 .../interpreter/ClassloaderInterpreter.java     |  278 +
 .../zeppelin/interpreter/Interpreter.java       |  283 +
 .../interpreter/InterpreterContext.java         |   68 +
 .../interpreter/InterpreterException.java       |   34 +
 .../zeppelin/interpreter/InterpreterGroup.java  |   65 +
 .../interpreter/InterpreterProperty.java        |   49 +
 .../interpreter/InterpreterPropertyBuilder.java |   37 +
 .../zeppelin/interpreter/InterpreterResult.java |  137 +
 .../zeppelin/interpreter/InterpreterUtils.java  |   36 +
 .../interpreter/LazyOpenInterpreter.java        |  148 +
 .../interpreter/WrappedInterpreter.java         |   25 +
 .../interpreter/remote/ClientFactory.java       |   79 +
 .../interpreter/remote/RemoteInterpreter.java   |  347 +
 .../remote/RemoteInterpreterProcess.java        |  208 +
 .../remote/RemoteInterpreterServer.java         |  342 +
 .../remote/RemoteInterpreterUtils.java          |   49 +
 .../thrift/RemoteInterpreterContext.java        |  786 ++
 .../thrift/RemoteInterpreterResult.java         |  786 ++
 .../thrift/RemoteInterpreterService.java        | 8174 ++++++++++++++++++
 .../zeppelin/scheduler/FIFOScheduler.java       |  151 +
 .../java/org/apache/zeppelin/scheduler/Job.java |  263 +
 .../apache/zeppelin/scheduler/JobListener.java  |   32 +
 .../zeppelin/scheduler/JobProgressPoller.java   |   70 +
 .../zeppelin/scheduler/ParallelScheduler.java   |  179 +
 .../zeppelin/scheduler/RemoteScheduler.java     |  373 +
 .../apache/zeppelin/scheduler/Scheduler.java    |   38 +
 .../zeppelin/scheduler/SchedulerFactory.java    |  145 +
 .../zeppelin/scheduler/SchedulerListener.java   |   30 +
 .../main/thrift/RemoteInterpreterService.thrift |   20 +-
 .../com/nflabs/zeppelin/display/InputTest.java  |   24 -
 .../remote/RemoteInterpreterProcessTest.java    |   46 -
 .../remote/RemoteInterpreterServerTest.java     |   57 -
 .../remote/RemoteInterpreterTest.java           |  428 -
 .../remote/RemoteInterpreterUtilsTest.java      |   16 -
 .../remote/mock/MockInterpreterA.java           |   84 -
 .../remote/mock/MockInterpreterB.java           |  101 -
 .../zeppelin/scheduler/FIFOSchedulerTest.java   |   74 -
 .../scheduler/ParallelSchedulerTest.java        |   49 -
 .../zeppelin/scheduler/RemoteSchedulerTest.java |  105 -
 .../nflabs/zeppelin/scheduler/SleepingJob.java  |   51 -
 .../org/apache/zeppelin/display/InputTest.java  |   39 +
 .../remote/RemoteInterpreterProcessTest.java    |   63 +
 .../remote/RemoteInterpreterServerTest.java     |   76 +
 .../remote/RemoteInterpreterTest.java           |  446 +
 .../remote/RemoteInterpreterUtilsTest.java      |   34 +
 .../remote/mock/MockInterpreterA.java           |  101 +
 .../remote/mock/MockInterpreterB.java           |  118 +
 .../zeppelin/scheduler/FIFOSchedulerTest.java   |   94 +
 .../scheduler/ParallelSchedulerTest.java        |   71 +
 .../zeppelin/scheduler/RemoteSchedulerTest.java |  124 +
 .../apache/zeppelin/scheduler/SleepingJob.java  |   75 +
 .../src/test/resources/log4j.properties         |   17 +
 zeppelin-server/pom.xml                         |   41 +-
 .../zeppelin/rest/InterpreterRestApi.java       |  152 -
 .../nflabs/zeppelin/rest/NotebookResponse.java  |   20 -
 .../nflabs/zeppelin/rest/NotebookRestApi.java   |   98 -
 .../nflabs/zeppelin/rest/ZeppelinRestApi.java   |   35 -
 .../InterpreterSettingListForNoteBind.java      |   67 -
 .../message/NewInterpreterSettingRequest.java   |   36 -
 .../UpdateInterpreterSettingRequest.java        |   28 -
 .../zeppelin/server/AppScriptServlet.java       |   76 -
 .../com/nflabs/zeppelin/server/CorsFilter.java  |   54 -
 .../nflabs/zeppelin/server/JsonResponse.java    |  126 -
 .../nflabs/zeppelin/server/ZeppelinServer.java  |  311 -
 .../com/nflabs/zeppelin/socket/Message.java     |   93 -
 .../nflabs/zeppelin/socket/NotebookServer.java  |  483 --
 .../socket/SslWebSocketServerFactory.java       |   61 -
 .../zeppelin/rest/InterpreterRestApi.java       |  169 +
 .../apache/zeppelin/rest/NotebookResponse.java  |   37 +
 .../apache/zeppelin/rest/NotebookRestApi.java   |  112 +
 .../apache/zeppelin/rest/ZeppelinRestApi.java   |   52 +
 .../InterpreterSettingListForNoteBind.java      |   84 +
 .../message/NewInterpreterSettingRequest.java   |   53 +
 .../UpdateInterpreterSettingRequest.java        |   45 +
 .../zeppelin/server/AppScriptServlet.java       |   93 +
 .../org/apache/zeppelin/server/CorsFilter.java  |   71 +
 .../apache/zeppelin/server/JsonResponse.java    |  145 +
 .../apache/zeppelin/server/ZeppelinServer.java  |  327 +
 .../org/apache/zeppelin/socket/Message.java     |  110 +
 .../apache/zeppelin/socket/NotebookServer.java  |  500 ++
 .../socket/SslWebSocketServerFactory.java       |   76 +
 .../java/com/nflabs/zeppelin/ZeppelinIT.java    |  327 -
 .../zeppelin/rest/AbstractTestRestApi.java      |  239 -
 .../zeppelin/rest/ZeppelinRestApiTest.java      |   95 -
 .../ScreenCaptureHtmlUnitDriver.java            |    3 +-
 .../java/org/apache/zeppelin/ZeppelinIT.java    |  338 +
 .../zeppelin/rest/AbstractTestRestApi.java      |  256 +
 .../zeppelin/rest/ZeppelinRestApiTest.java      |  112 +
 .../src/test/resources/log4j.properties         |   19 +-
 .../zeppelin/AbstractFunctionalSuite.scala      |   65 -
 .../com/nflabs/zeppelin/WelcomePageSuite.scala  |   20 -
 .../zeppelin/AbstractFunctionalSuite.scala      |   82 +
 .../org/apache/zeppelin/WelcomePageSuite.scala  |   37 +
 zeppelin-web/Gruntfile.js                       |   17 +
 zeppelin-web/app/404.html                       |   14 +
 zeppelin-web/app/WEB-INF/web.xml                |   19 +-
 zeppelin-web/app/fonts/custom-font.svg          |   18 +-
 zeppelin-web/app/index.html                     |    2 -
 .../scripts/ace/textarea/src/ace-bookmarklet.js |   15 +-
 zeppelin-web/app/scripts/app.js                 |   15 +-
 .../app/scripts/controllers/interpreter.js      |    3 +-
 zeppelin-web/app/scripts/controllers/main.js    |    3 +-
 zeppelin-web/app/scripts/controllers/nav.js     |    3 +-
 .../app/scripts/controllers/notebook.js         |    3 +-
 .../app/scripts/controllers/paragraph.js        |    3 +-
 .../app/scripts/directives/dropdowninput.js     |   13 +
 zeppelin-web/app/scripts/directives/ngdelete.js |   14 +
 zeppelin-web/app/scripts/directives/ngenter.js  |    3 +-
 .../scripts/directives/popover-html-unsafe.js   |   13 +
 .../app/scripts/directives/resizable.js         |   13 +
 zeppelin-web/app/styles/custom-font.css         |   14 +
 zeppelin-web/app/styles/interpreter.css         |   14 +
 zeppelin-web/app/styles/looknfeel/default.css   |    3 +-
 zeppelin-web/app/styles/looknfeel/report.css    |    3 +-
 zeppelin-web/app/styles/looknfeel/simple.css    |    3 +-
 zeppelin-web/app/styles/main.css                |    3 +-
 zeppelin-web/app/styles/notebook.css            |    3 +-
 zeppelin-web/app/styles/printMode.css           |    3 +-
 zeppelin-web/app/styles/typography.css          |    3 +-
 zeppelin-web/app/views/interpreter.html         |    2 -
 zeppelin-web/app/views/main.html                |    2 -
 zeppelin-web/app/views/modal-shortcut.html      |    2 -
 zeppelin-web/app/views/notebooks.html           |    2 -
 zeppelin-web/app/views/paragraph.html           |    2 -
 .../app/views/popover-html-unsafe-popup.html    |   16 +-
 zeppelin-web/bower                              |   16 +
 zeppelin-web/grunt                              |   16 +
 zeppelin-web/pom.xml                            |   62 +-
 zeppelin-zengine/pom.xml                        |   21 +-
 .../zeppelin/conf/ZeppelinConfiguration.java    |  514 --
 .../interpreter/InterpreterFactory.java         |  596 --
 .../interpreter/InterpreterInfoSaving.java      |   12 -
 .../zeppelin/interpreter/InterpreterOption.java |   24 -
 .../interpreter/InterpreterSerializer.java      |   39 -
 .../interpreter/InterpreterSetting.java         |   91 -
 .../zeppelin/notebook/JobListenerFactory.java   |   13 -
 .../java/com/nflabs/zeppelin/notebook/Note.java |  350 -
 .../notebook/NoteInterpreterLoader.java         |   90 -
 .../com/nflabs/zeppelin/notebook/Notebook.java  |  283 -
 .../com/nflabs/zeppelin/notebook/Paragraph.java |  221 -
 .../zeppelin/notebook/utility/IdHashes.java     |   57 -
 .../java/com/nflabs/zeppelin/util/Util.java     |  170 -
 .../zeppelin/conf/ZeppelinConfiguration.java    |  531 ++
 .../interpreter/InterpreterFactory.java         |  613 ++
 .../interpreter/InterpreterInfoSaving.java      |   29 +
 .../zeppelin/interpreter/InterpreterOption.java |   41 +
 .../interpreter/InterpreterSerializer.java      |   56 +
 .../interpreter/InterpreterSetting.java         |  108 +
 .../zeppelin/notebook/JobListenerFactory.java   |   30 +
 .../java/org/apache/zeppelin/notebook/Note.java |  367 +
 .../notebook/NoteInterpreterLoader.java         |  107 +
 .../org/apache/zeppelin/notebook/Notebook.java  |  299 +
 .../org/apache/zeppelin/notebook/Paragraph.java |  237 +
 .../zeppelin/notebook/utility/IdHashes.java     |   74 +
 .../java/org/apache/zeppelin/util/Util.java     |  187 +
 zeppelin-zengine/src/main/resources/exec.erb    |   15 -
 zeppelin-zengine/src/main/resources/table.erb   |   36 -
 .../interpreter/InterpreterFactoryTest.java     |  112 -
 .../interpreter/mock/MockInterpreter1.java      |   57 -
 .../interpreter/mock/MockInterpreter2.java      |   57 -
 .../nflabs/zeppelin/notebook/NotebookTest.java  |  173 -
 .../java/com/nflabs/zeppelin/util/UtilTest.java |   79 -
 .../com/nflabs/zeppelin/util/UtilsForTests.java |  104 -
 .../interpreter/InterpreterFactoryTest.java     |  128 +
 .../interpreter/mock/MockInterpreter1.java      |   74 +
 .../interpreter/mock/MockInterpreter2.java      |   74 +
 .../apache/zeppelin/notebook/NotebookTest.java  |  189 +
 .../java/org/apache/zeppelin/util/UtilTest.java |  100 +
 .../org/apache/zeppelin/util/UtilsForTests.java |  119 +
 284 files changed, 25764 insertions(+), 24116 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index b44ee80..7eb5935 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,3 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 language: java
 jdk:
   - oraclejdk7

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/NOTICE
----------------------------------------------------------------------
diff --git a/NOTICE b/NOTICE
index 7199f2d..c455bcb 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,6 +1,54 @@
-Zeppelin
-Copyright 2013-2014 NFLabs, inc.
+Apache Zeppelin (incubating)
+Copyright 2015 The Apache Software Foundation
 
-This product includes software developed by The Apache Software
-Foundation (http://www.apache.org/).
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
 
+
+Portions of this software were developed at NFLabs, Inc. (http://www.nflabs.com)
+Copyright (c) 2010-2015 NFLabs Inc.
+
+
+
+This product contains webfonts from 'Font Awesome', which can be obtained at:
+
+  * LICENSE:
+    * SIL OFL 1.1 (http://scripts.sil.org/OFL)
+  * HOMEPAGE:
+    * http://fortawesome.github.io/Font-Awesome/
+
+
+
+This product contains a stylesheet from 'Font Awesome', which can be obtained at:
+
+  * LICENSE:
+    * MIT License (http://opensource.org/licenses/mit-license.html)
+  * HOMEPAGE:
+    * http://fortawesome.github.io/Font-Awesome/
+
+
+
+This product contains the embeddable code editor 'Ace', which can be obtained at:
+
+  * LICENSE:
+    * BSD (https://github.com/ajaxorg/ace/blob/master/LICENSE)
+      Copyright (c) 2010, Ajax.org B.V.
+      All rights reserved. 
+  * HOMEPAGE:
+    * http://ace.c9.io/
+
+
+
+This product contains 'Simple line icons', which can be obtained at:
+
+  * LICENSE:
+    * MIT License (http://opensource.org/licenses/mit-license.html)
+  * HOMEPAGE:
+    * http://thesabbir.github.io/simple-line-icons/
+
+
+from https://code.google.com/p/selenium/issues/detail?id=1361
+
+
+The com.webautomation package contains software developed by Ivan Vasiliev
+(https://groups.google.com/forum/#!msg/selenium-developers/PTR_j4xLVRM/k2yVq01Fa7oJ)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/_tools/checkstyle.xml
----------------------------------------------------------------------
diff --git a/_tools/checkstyle.xml b/_tools/checkstyle.xml
index 0157e84..618d74d 100644
--- a/_tools/checkstyle.xml
+++ b/_tools/checkstyle.xml
@@ -1,4 +1,19 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+     http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
 <!DOCTYPE module PUBLIC
     "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
     "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/bin/common.sh
----------------------------------------------------------------------
diff --git a/bin/common.sh b/bin/common.sh
index ccd8a29..a825b76 100644
--- a/bin/common.sh
+++ b/bin/common.sh
@@ -1,16 +1,13 @@
 #!/bin/bash
 #
-# Copyright 2007 The Apache Software Foundation
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
 #
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
+#    http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/bin/interpreter.sh
----------------------------------------------------------------------
diff --git a/bin/interpreter.sh b/bin/interpreter.sh
index 4aa3ca0..025df36 100755
--- a/bin/interpreter.sh
+++ b/bin/interpreter.sh
@@ -1,4 +1,20 @@
 #!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 bin=$(dirname "${BASH_SOURCE-$0}")
 bin=$(cd "${bin}">/dev/null; pwd)
@@ -39,7 +55,7 @@ addJarInDir "${INTERPRETER_DIR}"
 export CLASSPATH+=":${ZEPPELIN_CLASSPATH}"
 
 HOSTNAME=$(hostname)
-ZEPPELIN_SERVER=com.nflabs.zeppelin.interpreter.remote.RemoteInterpreterServer
+ZEPPELIN_SERVER=org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer
 
 INTERPRETER_ID=$(basename "${INTERPRETER_DIR}")
 ZEPPELIN_PID="${ZEPPELIN_PID_DIR}/zeppelin-interpreter-${INTERPRETER_ID}-${ZEPPELIN_IDENT_STRING}-${HOSTNAME}.pid"

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/bin/zeppelin-daemon.sh
----------------------------------------------------------------------
diff --git a/bin/zeppelin-daemon.sh b/bin/zeppelin-daemon.sh
index b5aaeff..56e6c88 100755
--- a/bin/zeppelin-daemon.sh
+++ b/bin/zeppelin-daemon.sh
@@ -51,7 +51,7 @@ ZEPPELIN_NAME="Zeppelin"
 ZEPPELIN_LOGFILE="${ZEPPELIN_LOG_DIR}/zeppelin-${ZEPPELIN_IDENT_STRING}-${HOSTNAME}.log"
 ZEPPELIN_OUTFILE="${ZEPPELIN_LOG_DIR}/zeppelin-${ZEPPELIN_IDENT_STRING}-${HOSTNAME}.out"
 ZEPPELIN_PID="${ZEPPELIN_PID_DIR}/zeppelin-${ZEPPELIN_IDENT_STRING}-${HOSTNAME}.pid"
-ZEPPELIN_MAIN=com.nflabs.zeppelin.server.ZeppelinServer
+ZEPPELIN_MAIN=org.apache.zeppelin.server.ZeppelinServer
 JAVA_OPTS+=" -Dzeppelin.log.file=${ZEPPELIN_LOGFILE}"
 
 if [[ "${ZEPPELIN_NICENESS}" = "" ]]; then

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/bin/zeppelin.sh
----------------------------------------------------------------------
diff --git a/bin/zeppelin.sh b/bin/zeppelin.sh
index 87dd7df..671673a 100755
--- a/bin/zeppelin.sh
+++ b/bin/zeppelin.sh
@@ -48,7 +48,7 @@ HOSTNAME=$(hostname)
 ZEPPELIN_LOGFILE="${ZEPPELIN_LOG_DIR}/zeppelin-${ZEPPELIN_IDENT_STRING}-${HOSTNAME}.log"
 LOG="${ZEPPELIN_LOG_DIR}/zeppelin-cli-${ZEPPELIN_IDENT_STRING}-${HOSTNAME}.out"
   
-ZEPPELIN_SERVER=com.nflabs.zeppelin.server.ZeppelinServer
+ZEPPELIN_SERVER=org.apache.zeppelin.server.ZeppelinServer
 JAVA_OPTS+=" -Dzeppelin.log.file=${ZEPPELIN_LOGFILE}"
 
 if [[ ! -d "${ZEPPELIN_LOG_DIR}" ]]; then

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/conf/log4j.properties
----------------------------------------------------------------------
diff --git a/conf/log4j.properties b/conf/log4j.properties
index 083940d..a7ef28b 100644
--- a/conf/log4j.properties
+++ b/conf/log4j.properties
@@ -1,3 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 log4j.rootLogger = INFO, dailyfile
 
 log4j.appender.stdout = org.apache.log4j.ConsoleAppender

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/conf/zeppelin-env.sh.template
----------------------------------------------------------------------
diff --git a/conf/zeppelin-env.sh.template b/conf/zeppelin-env.sh.template
index d6b6b00..62aa95a 100644
--- a/conf/zeppelin-env.sh.template
+++ b/conf/zeppelin-env.sh.template
@@ -1,4 +1,20 @@
 #!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 # export JAVA_HOME=
 # export MASTER=                 # Spark master url. eg. spark://master_addr:7077. Leave empty if you want to use local mode

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/conf/zeppelin-site.xml.template
----------------------------------------------------------------------
diff --git a/conf/zeppelin-site.xml.template b/conf/zeppelin-site.xml.template
index 7ce34f4..c203179 100644
--- a/conf/zeppelin-site.xml.template
+++ b/conf/zeppelin-site.xml.template
@@ -48,7 +48,7 @@
 
 <property>
   <name>zeppelin.interpreters</name>
-  <value>com.nflabs.zeppelin.spark.SparkInterpreter,com.nflabs.zeppelin.spark.PySparkInterpreter,com.nflabs.zeppelin.spark.SparkSqlInterpreter,com.nflabs.zeppelin.spark.DepInterpreter,com.nflabs.zeppelin.markdown.Markdown,com.nflabs.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.hive.HiveInterpreter</value>
+  <value>org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.hive.HiveInterpreter</value>
   <description>Comma separated interpreter configurations. The first interpreter becomes the default</description>
 </property>
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/hive/pom.xml
----------------------------------------------------------------------
diff --git a/hive/pom.xml b/hive/pom.xml
index fbc853d..8899979 100644
--- a/hive/pom.xml
+++ b/hive/pom.xml
@@ -1,10 +1,27 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
 
@@ -21,7 +38,7 @@
   </properties>
   <dependencies>
     <dependency>
-      <groupId>com.nflabs.zeppelin</groupId>
+      <groupId>org.apache.zeppelin</groupId>
       <artifactId>zeppelin-interpreter</artifactId>
       <version>${project.version}</version>
       <scope>provided</scope>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java
----------------------------------------------------------------------
diff --git a/hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java b/hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java
index 1fa9228..5e7fc7d 100644
--- a/hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java
+++ b/hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java
@@ -21,14 +21,14 @@ import java.sql.*;
 import java.util.List;
 import java.util.Properties;
 
-import com.nflabs.zeppelin.interpreter.*;
+import org.apache.zeppelin.interpreter.*;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
 
 /**
  * Hive interpreter for Zeppelin.

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/hive/src/test/java/org/apache/zeppelin/hive/HiveInterpreterTest.java
----------------------------------------------------------------------
diff --git a/hive/src/test/java/org/apache/zeppelin/hive/HiveInterpreterTest.java b/hive/src/test/java/org/apache/zeppelin/hive/HiveInterpreterTest.java
index 4bae60b..41ab108 100644
--- a/hive/src/test/java/org/apache/zeppelin/hive/HiveInterpreterTest.java
+++ b/hive/src/test/java/org/apache/zeppelin/hive/HiveInterpreterTest.java
@@ -29,8 +29,8 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.Executor;
 
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/markdown/pom.xml
----------------------------------------------------------------------
diff --git a/markdown/pom.xml b/markdown/pom.xml
index 3b6cdef..8e6cc52 100644
--- a/markdown/pom.xml
+++ b/markdown/pom.xml
@@ -1,19 +1,36 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+  
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
 
-  <groupId>com.nflabs.zeppelin</groupId>
+  <groupId>org.apache.zeppelin</groupId>
   <artifactId>zeppelin-markdown</artifactId>
   <packaging>jar</packaging>
   <version>0.5.0-SNAPSHOT</version>
   <name>Zeppelin: Markdown interpreter</name>
-  <url>http://www.nflabs.com</url>
+  <url>http://zeppelin.incubator.apache.org</url>
 
   <dependencies>
     <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/markdown/src/main/java/com/nflabs/zeppelin/markdown/Markdown.java
----------------------------------------------------------------------
diff --git a/markdown/src/main/java/com/nflabs/zeppelin/markdown/Markdown.java b/markdown/src/main/java/com/nflabs/zeppelin/markdown/Markdown.java
deleted file mode 100644
index aec8020..0000000
--- a/markdown/src/main/java/com/nflabs/zeppelin/markdown/Markdown.java
+++ /dev/null
@@ -1,77 +0,0 @@
-package com.nflabs.zeppelin.markdown;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Properties;
-
-import com.nflabs.zeppelin.interpreter.InterpreterUtils;
-import org.markdown4j.Markdown4jProcessor;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-/**
- * Markdown interpreter for Zeppelin.
- *
- * @author Leemoonsoo
- * @author anthonycorbacho
- *
- */
-public class Markdown extends Interpreter {
-  private Markdown4jProcessor md;
-
-  static {
-    Interpreter.register("md", Markdown.class.getName());
-  }
-
-  public Markdown(Properties property) {
-    super(property);
-  }
-
-  @Override
-  public void open() {
-    md = new Markdown4jProcessor();
-  }
-
-  @Override
-  public void close() {}
-
-  @Override
-  public InterpreterResult interpret(String st, InterpreterContext interpreterContext) {
-    String html;
-    try {
-      html = md.process(st);
-    } catch (IOException | java.lang.RuntimeException e) {
-      return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
-    }
-    return new InterpreterResult(Code.SUCCESS, "%html " + html);
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {}
-
-  @Override
-  public FormType getFormType() {
-    return FormType.SIMPLE;
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    return 0;
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    return SchedulerFactory.singleton().createOrGetParallelScheduler(
-        Markdown.class.getName() + this.hashCode(), 5);
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    return null;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/markdown/src/main/java/org/apache/zeppelin/markdown/Markdown.java
----------------------------------------------------------------------
diff --git a/markdown/src/main/java/org/apache/zeppelin/markdown/Markdown.java b/markdown/src/main/java/org/apache/zeppelin/markdown/Markdown.java
new file mode 100644
index 0000000..88b3287
--- /dev/null
+++ b/markdown/src/main/java/org/apache/zeppelin/markdown/Markdown.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.markdown;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.interpreter.InterpreterUtils;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.markdown4j.Markdown4jProcessor;
+
+/**
+ * Markdown interpreter for Zeppelin.
+ *
+ * @author Leemoonsoo
+ * @author anthonycorbacho
+ *
+ */
+public class Markdown extends Interpreter {
+  private Markdown4jProcessor md;
+
+  static {
+    Interpreter.register("md", Markdown.class.getName());
+  }
+
+  public Markdown(Properties property) {
+    super(property);
+  }
+
+  @Override
+  public void open() {
+    md = new Markdown4jProcessor();
+  }
+
+  @Override
+  public void close() {}
+
+  @Override
+  public InterpreterResult interpret(String st, InterpreterContext interpreterContext) {
+    String html;
+    try {
+      html = md.process(st);
+    } catch (IOException | java.lang.RuntimeException e) {
+      return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
+    }
+    return new InterpreterResult(Code.SUCCESS, "%html " + html);
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {}
+
+  @Override
+  public FormType getFormType() {
+    return FormType.SIMPLE;
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    return 0;
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    return SchedulerFactory.singleton().createOrGetParallelScheduler(
+        Markdown.class.getName() + this.hashCode(), 5);
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    return null;
+  }
+}
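
For reference, a minimal usage sketch of the renamed Markdown interpreter, not part of this commit: the Markdown and InterpreterResult signatures follow the diff above, while the wrapping MarkdownUsageSketch class and the null InterpreterContext are illustrative assumptions (the MarkdownTest further below exercises the same path).

    import java.util.Properties;

    import org.apache.zeppelin.interpreter.InterpreterResult;
    import org.apache.zeppelin.markdown.Markdown;

    public class MarkdownUsageSketch {
      public static void main(String[] args) {
        // interpret() renders markdown to HTML and prefixes the result with
        // "%html" so the Zeppelin front end displays it as HTML.
        Markdown md = new Markdown(new Properties());
        md.open();
        InterpreterResult result = md.interpret("This is ~~deleted~~ text", null);
        System.out.println(result.message());
        md.close();
      }
    }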

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/markdown/src/test/java/com/nflabs/zeppelin/markdown/MarkdownTest.java
----------------------------------------------------------------------
diff --git a/markdown/src/test/java/com/nflabs/zeppelin/markdown/MarkdownTest.java b/markdown/src/test/java/com/nflabs/zeppelin/markdown/MarkdownTest.java
deleted file mode 100644
index f62711c..0000000
--- a/markdown/src/test/java/com/nflabs/zeppelin/markdown/MarkdownTest.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package com.nflabs.zeppelin.markdown;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.Properties;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-
-public class MarkdownTest {
-
-	@Before
-	public void setUp() throws Exception {
-	}
-
-	@After
-	public void tearDown() throws Exception {
-	}
-
-	@Test
-	public void test() {
-		Markdown md = new Markdown(new Properties());
-		md.open();
-		InterpreterResult result = md.interpret("This is ~~deleted~~ text", null);
-		assertEquals("<p>This is <s>deleted</s> text</p>\n", result.message());
-		System.out.println(MarkdownTest.class.getName());
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/markdown/src/test/java/org/apache/zeppelin/markdown/MarkdownTest.java
----------------------------------------------------------------------
diff --git a/markdown/src/test/java/org/apache/zeppelin/markdown/MarkdownTest.java b/markdown/src/test/java/org/apache/zeppelin/markdown/MarkdownTest.java
new file mode 100644
index 0000000..86d6c59
--- /dev/null
+++ b/markdown/src/test/java/org/apache/zeppelin/markdown/MarkdownTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.markdown;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Properties;
+
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.markdown.Markdown;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class MarkdownTest {
+
+	@Before
+	public void setUp() throws Exception {
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void test() {
+		Markdown md = new Markdown(new Properties());
+		md.open();
+		InterpreterResult result = md.interpret("This is ~~deleted~~ text", null);
+		assertEquals("<p>This is <s>deleted</s> text</p>\n", result.message());
+		System.out.println(MarkdownTest.class.getName());
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7048a89..20a073e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,4 +1,21 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
 
@@ -15,7 +32,7 @@
 
   <modelVersion>4.0.0</modelVersion>
 
-  <groupId>com.nflabs.zeppelin</groupId>
+  <groupId>org.apache.zeppelin</groupId>
   <artifactId>zeppelin</artifactId>
   <packaging>pom</packaging>
   <version>0.5.0-SNAPSHOT</version>
@@ -38,9 +55,9 @@
   </licenses>
 
   <scm>
-    <url>http://www.github.com/NFLabs/zeppelin</url>
-    <connection>scm:git:git:git@github.com:NFLabs/zeppelin.git</connection>
-    <developerConnection>scm:git:git@github.com:NFLabs/zeppelin.git</developerConnection>
+    <url>http://zeppelin.incubator.apache.org</url>
+    <connection>scm:git:git:git@github.com:apache/incubator-zeppelin.git</connection>
+    <developerConnection>scm:git:git@github.com:apache/incubator-zeppelin.git</developerConnection>
   </scm>
 
   <developers>
@@ -853,6 +870,10 @@
   <build>
     <plugins>
       <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+      </plugin>
+      <plugin>
         <artifactId>maven-compiler-plugin</artifactId>
         <version>3.1</version>
         <configuration>
@@ -899,7 +920,7 @@
             </goals>
             <configuration>
               <failOnViolation>true</failOnViolation>
-              <excludes>com/nflabs/zeppelin/interpreter/thrift/*</excludes>
+              <excludes>org/apache/zeppelin/interpreter/thrift/*</excludes>
             </configuration>
           </execution>
         </executions>
@@ -1024,6 +1045,56 @@
     <pluginManagement>
       <plugins>
         <plugin>
+          <groupId>org.apache.rat</groupId>
+          <artifactId>apache-rat-plugin</artifactId>
+          <version>0.11</version>
+          <configuration>
+            <excludes>
+              <exclude>**/.idea/</exclude>
+              <exclude>**/*.iml</exclude>
+              <exclude>.git/</exclude>
+              <exclude>.gitignore</exclude>
+              <exclude>.repository/</exclude>
+              <exclude>**/*.diff</exclude>
+              <exclude>**/*.patch</exclude>
+              <exclude>**/*.avsc</exclude>
+              <exclude>**/*.avro</exclude>
+              <exclude>**/*.log</exclude>
+              <exclude>**/docs/**</exclude>
+              <exclude>**/test/resources/**</exclude>
+              <exclude>**/.settings/*</exclude>
+              <exclude>**/.classpath</exclude>
+              <exclude>**/.project</exclude>
+              <exclude>**/target/**</exclude>
+              <exclude>**/derby.log</exclude>
+              <exclude>**/metastore_db/</exclude>
+              <exclude>**/logs/**</exclude>
+              <exclude>**/run/**</exclude>
+              <exclude>**/interpreter/**</exclude>
+              <exclude>**/local-repo/**</exclude>
+              <exclude>**/null/**</exclude>
+              <exclude>**/notebook/**</exclude>
+              <exclude>_tools/site/css/*</exclude>
+              <exclude>**/README.md</exclude>
+              <exclude>DEPLOY.md</exclude>
+              <exclude>CONTRIBUTING.md</exclude>
+              <exclude>STYLE.md</exclude>
+              <exclude>Roadmap.md</exclude>
+              <exclude>conf/interpreter.json</exclude>
+            </excludes>
+          </configuration>
+
+          <executions>
+            <execution>
+              <id>verify.rat</id>
+              <phase>verify</phase>
+              <goals>
+                <goal>check</goal>
+              </goals>
+            </execution>
+          </executions>
+        </plugin>
+        <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-checkstyle-plugin</artifactId>
           <version>2.13</version>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/shell/pom.xml
----------------------------------------------------------------------
diff --git a/shell/pom.xml b/shell/pom.xml
index d2ea754..7261332 100644
--- a/shell/pom.xml
+++ b/shell/pom.xml
@@ -1,19 +1,36 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
 
-  <groupId>com.nflabs.zeppelin</groupId>
+  <groupId>org.apache.zeppelin</groupId>
   <artifactId>zeppelin-shell</artifactId>
   <packaging>jar</packaging>
   <version>0.5.0-SNAPSHOT</version>
   <name>Zeppelin: Shell interpreter</name>
-  <url>http://www.nflabs.com</url>
+  <url>http://zeppelin.incubator.apache.org</url>
 
   <dependencies>
     <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/shell/src/main/java/com/nflabs/zeppelin/shell/ShellInterpreter.java
----------------------------------------------------------------------
diff --git a/shell/src/main/java/com/nflabs/zeppelin/shell/ShellInterpreter.java b/shell/src/main/java/com/nflabs/zeppelin/shell/ShellInterpreter.java
deleted file mode 100644
index d1762f6..0000000
--- a/shell/src/main/java/com/nflabs/zeppelin/shell/ShellInterpreter.java
+++ /dev/null
@@ -1,97 +0,0 @@
-package com.nflabs.zeppelin.shell;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.exec.DefaultExecutor;
-import org.apache.commons.exec.ExecuteException;
-import org.apache.commons.exec.ExecuteWatchdog;
-import org.apache.commons.exec.PumpStreamHandler;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-/**
- * Shell interpreter for Zeppelin.
- *
- * @author Leemoonsoo
- * @author anthonycorbacho
- *
- */
-public class ShellInterpreter extends Interpreter {
-  Logger logger = LoggerFactory.getLogger(ShellInterpreter.class);
-  int commandTimeOut = 600000;
-
-  static {
-    Interpreter.register("sh", ShellInterpreter.class.getName());
-  }
-
-  public ShellInterpreter(Properties property) {
-    super(property);
-  }
-
-  @Override
-  public void open() {}
-
-  @Override
-  public void close() {}
-
-
-  @Override
-  public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
-    logger.info("Run shell command '" + cmd + "'");
-    long start = System.currentTimeMillis();
-    CommandLine cmdLine = CommandLine.parse("bash");
-    cmdLine.addArgument("-c", false);
-    cmdLine.addArgument(cmd, false);
-    DefaultExecutor executor = new DefaultExecutor();
-    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
-    executor.setStreamHandler(new PumpStreamHandler(outputStream));
-
-    executor.setWatchdog(new ExecuteWatchdog(commandTimeOut));
-    try {
-      int exitValue = executor.execute(cmdLine);
-      return new InterpreterResult(InterpreterResult.Code.SUCCESS, outputStream.toString());
-    } catch (ExecuteException e) {
-      logger.error("Can not run " + cmd, e);
-      return new InterpreterResult(Code.ERROR, e.getMessage());
-    } catch (IOException e) {
-      logger.error("Can not run " + cmd, e);
-      return new InterpreterResult(Code.ERROR, e.getMessage());
-    }
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {}
-
-  @Override
-  public FormType getFormType() {
-    return FormType.SIMPLE;
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    return 0;
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    return SchedulerFactory.singleton().createOrGetFIFOScheduler(
-        ShellInterpreter.class.getName() + this.hashCode());
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    return null;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/shell/src/main/java/org/apache/zeppelin/shell/ShellInterpreter.java
----------------------------------------------------------------------
diff --git a/shell/src/main/java/org/apache/zeppelin/shell/ShellInterpreter.java b/shell/src/main/java/org/apache/zeppelin/shell/ShellInterpreter.java
new file mode 100644
index 0000000..64a9485
--- /dev/null
+++ b/shell/src/main/java/org/apache/zeppelin/shell/ShellInterpreter.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.shell;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.exec.DefaultExecutor;
+import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.exec.ExecuteWatchdog;
+import org.apache.commons.exec.PumpStreamHandler;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Shell interpreter for Zeppelin.
+ *
+ * @author Leemoonsoo
+ * @author anthonycorbacho
+ *
+ */
+public class ShellInterpreter extends Interpreter {
+  Logger logger = LoggerFactory.getLogger(ShellInterpreter.class);
+  int commandTimeOut = 600000;
+
+  static {
+    Interpreter.register("sh", ShellInterpreter.class.getName());
+  }
+
+  public ShellInterpreter(Properties property) {
+    super(property);
+  }
+
+  @Override
+  public void open() {}
+
+  @Override
+  public void close() {}
+
+
+  @Override
+  public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
+    logger.info("Run shell command '" + cmd + "'");
+    long start = System.currentTimeMillis();
+    CommandLine cmdLine = CommandLine.parse("bash");
+    cmdLine.addArgument("-c", false);
+    cmdLine.addArgument(cmd, false);
+    DefaultExecutor executor = new DefaultExecutor();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    executor.setStreamHandler(new PumpStreamHandler(outputStream));
+
+    executor.setWatchdog(new ExecuteWatchdog(commandTimeOut));
+    try {
+      int exitValue = executor.execute(cmdLine);
+      return new InterpreterResult(InterpreterResult.Code.SUCCESS, outputStream.toString());
+    } catch (ExecuteException e) {
+      logger.error("Can not run " + cmd, e);
+      return new InterpreterResult(Code.ERROR, e.getMessage());
+    } catch (IOException e) {
+      logger.error("Can not run " + cmd, e);
+      return new InterpreterResult(Code.ERROR, e.getMessage());
+    }
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {}
+
+  @Override
+  public FormType getFormType() {
+    return FormType.SIMPLE;
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    return 0;
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    return SchedulerFactory.singleton().createOrGetFIFOScheduler(
+        ShellInterpreter.class.getName() + this.hashCode());
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    return null;
+  }
+
+}
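
For reference, a minimal usage sketch of the renamed ShellInterpreter, not part of this commit: the ShellInterpreter and InterpreterResult signatures follow the diff above, while the wrapping ShellUsageSketch class and the null InterpreterContext are illustrative assumptions.

    import java.util.Properties;

    import org.apache.zeppelin.interpreter.InterpreterResult;
    import org.apache.zeppelin.shell.ShellInterpreter;

    public class ShellUsageSketch {
      public static void main(String[] args) {
        // interpret() runs the given string through `bash -c`, with a
        // PumpStreamHandler capturing stdout into the returned result.
        ShellInterpreter sh = new ShellInterpreter(new Properties());
        sh.open();
        InterpreterResult result = sh.interpret("echo hello from zeppelin", null);
        System.out.println(result.message());
        sh.close();
      }
    }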

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/pom.xml
----------------------------------------------------------------------
diff --git a/spark/pom.xml b/spark/pom.xml
index 7741d2b..f3143c0 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -1,15 +1,31 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
 
+  <groupId>org.apache.zeppelin</groupId>
   <artifactId>zeppelin-spark</artifactId>
   <packaging>jar</packaging>
   <version>0.5.0-SNAPSHOT</version>
@@ -276,6 +292,26 @@
   <build>
     <plugins>
       <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>**/.idea/</exclude>
+            <exclude>**/*.iml</exclude>
+            <exclude>.gitignore</exclude>
+            <exclude>**/.settings/*</exclude>
+            <exclude>**/.classpath</exclude>
+            <exclude>**/.project</exclude>
+            <exclude>**/target/**</exclude>
+            <exclude>**/derby.log</exclude>
+            <exclude>**/metastore_db/</exclude>
+            <exclude>**/README.md</exclude>
+            <exclude>dependency-reduced-pom.xml</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+
+      <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-deploy-plugin</artifactId>
         <version>2.7</version>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/DepInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/DepInterpreter.java b/spark/src/main/java/com/nflabs/zeppelin/spark/DepInterpreter.java
deleted file mode 100644
index de09772..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/DepInterpreter.java
+++ /dev/null
@@ -1,273 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.PrintStream;
-import java.io.PrintWriter;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.spark.repl.SparkILoop;
-import org.apache.spark.repl.SparkIMain;
-import org.apache.spark.repl.SparkJLineCompletion;
-import org.sonatype.aether.resolution.ArtifactResolutionException;
-import org.sonatype.aether.resolution.DependencyResolutionException;
-
-import scala.Console;
-import scala.None;
-import scala.Some;
-import scala.tools.nsc.Settings;
-import scala.tools.nsc.interpreter.Completion.Candidates;
-import scala.tools.nsc.interpreter.Completion.ScalaCompleter;
-import scala.tools.nsc.settings.MutableSettings.BooleanSetting;
-import scala.tools.nsc.settings.MutableSettings.PathSetting;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterPropertyBuilder;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-import com.nflabs.zeppelin.interpreter.WrappedInterpreter;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.spark.dep.DependencyContext;
-
-
-/**
- * DepInterpreter downloads dependencies and pass them when SparkInterpreter initialized.
- * It extends SparkInterpreter but does not create sparkcontext
- *
- */
-public class DepInterpreter extends Interpreter {
-
-  static {
-    Interpreter.register(
-        "dep",
-        "spark",
-        DepInterpreter.class.getName(),
-        new InterpreterPropertyBuilder()
-            .add("zeppelin.dep.localrepo", "local-repo", "local repository for dependency loader")
-            .build());
-
-  }
-
-  private SparkIMain intp;
-  private ByteArrayOutputStream out;
-  private DependencyContext depc;
-  private SparkJLineCompletion completor;
-  private SparkILoop interpreter;
-
-  public DepInterpreter(Properties property) {
-    super(property);
-  }
-
-  public DependencyContext getDependencyContext() {
-    return depc;
-  }
-
-
-  @Override
-  public void close() {
-    if (intp != null) {
-      intp.close();
-    }
-  }
-
-  @Override
-  public void open() {
-    out = new ByteArrayOutputStream();
-    createIMain();
-  }
-
-
-  private void createIMain() {
-    Settings settings = new Settings();
-    URL[] urls = getClassloaderUrls();
-
-    // set classpath for scala compiler
-    PathSetting pathSettings = settings.classpath();
-    String classpath = "";
-    List<File> paths = currentClassPath();
-    for (File f : paths) {
-      if (classpath.length() > 0) {
-        classpath += File.pathSeparator;
-      }
-      classpath += f.getAbsolutePath();
-    }
-
-    if (urls != null) {
-      for (URL u : urls) {
-        if (classpath.length() > 0) {
-          classpath += File.pathSeparator;
-        }
-        classpath += u.getFile();
-      }
-    }
-
-    pathSettings.v_$eq(classpath);
-    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);
-
-    // set classloader for scala compiler
-    settings.explicitParentLoader_$eq(new Some<ClassLoader>(Thread.currentThread()
-        .getContextClassLoader()));
-
-    BooleanSetting b = (BooleanSetting) settings.usejavacp();
-    b.v_$eq(true);
-    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);
-
-    interpreter = new SparkILoop(null, new PrintWriter(out));
-    interpreter.settings_$eq(settings);
-
-    interpreter.createInterpreter();
-
-
-    intp = interpreter.intp();
-    intp.setContextClassLoader();
-    intp.initializeSynchronous();
-
-    depc = new DependencyContext(getProperty("zeppelin.dep.localrepo"));
-    completor = new SparkJLineCompletion(intp);
-
-    intp.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
-    Map<String, Object> binder = (Map<String, Object>) getValue("_binder");
-    binder.put("depc", depc);
-
-    intp.interpret("@transient val z = "
-        + "_binder.get(\"depc\").asInstanceOf[com.nflabs.zeppelin.spark.dep.DependencyContext]");
-
-  }
-
-  public Object getValue(String name) {
-    Object ret = intp.valueOfTerm(name);
-    if (ret instanceof None) {
-      return null;
-    } else if (ret instanceof Some) {
-      return ((Some) ret).get();
-    } else {
-      return ret;
-    }
-  }
-
-  @Override
-  public InterpreterResult interpret(String st, InterpreterContext context) {
-    PrintStream printStream = new PrintStream(out);
-    Console.setOut(printStream);
-    out.reset();
-
-    SparkInterpreter sparkInterpreter = getSparkInterpreter();
-
-    if (sparkInterpreter != null && sparkInterpreter.isSparkContextInitialized()) {
-      return new InterpreterResult(Code.ERROR,
-          "Must be used before SparkInterpreter (%spark) initialized");
-    }
-
-    scala.tools.nsc.interpreter.Results.Result ret = intp.interpret(st);
-    Code code = getResultCode(ret);
-
-    try {
-      depc.fetch();
-    } catch (MalformedURLException | DependencyResolutionException
-        | ArtifactResolutionException e) {
-      return new InterpreterResult(Code.ERROR, e.toString());
-    }
-
-    if (code == Code.INCOMPLETE) {
-      return new InterpreterResult(code, "Incomplete expression");
-    } else if (code == Code.ERROR) {
-      return new InterpreterResult(code, out.toString());
-    } else {
-      return new InterpreterResult(code, out.toString());
-    }
-  }
-
-  private Code getResultCode(scala.tools.nsc.interpreter.Results.Result r) {
-    if (r instanceof scala.tools.nsc.interpreter.Results.Success$) {
-      return Code.SUCCESS;
-    } else if (r instanceof scala.tools.nsc.interpreter.Results.Incomplete$) {
-      return Code.INCOMPLETE;
-    } else {
-      return Code.ERROR;
-    }
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {
-  }
-
-
-  @Override
-  public FormType getFormType() {
-    return FormType.NATIVE;
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    return 0;
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    ScalaCompleter c = completor.completer();
-    Candidates ret = c.complete(buf, cursor);
-    return scala.collection.JavaConversions.asJavaList(ret.candidates());
-  }
-
-  private List<File> currentClassPath() {
-    List<File> paths = classPath(Thread.currentThread().getContextClassLoader());
-    String[] cps = System.getProperty("java.class.path").split(File.pathSeparator);
-    if (cps != null) {
-      for (String cp : cps) {
-        paths.add(new File(cp));
-      }
-    }
-    return paths;
-  }
-
-  private List<File> classPath(ClassLoader cl) {
-    List<File> paths = new LinkedList<File>();
-    if (cl == null) {
-      return paths;
-    }
-
-    if (cl instanceof URLClassLoader) {
-      URLClassLoader ucl = (URLClassLoader) cl;
-      URL[] urls = ucl.getURLs();
-      if (urls != null) {
-        for (URL url : urls) {
-          paths.add(new File(url.getFile()));
-        }
-      }
-    }
-    return paths;
-  }
-
-  private SparkInterpreter getSparkInterpreter() {
-    InterpreterGroup intpGroup = getInterpreterGroup();
-    if (intpGroup == null) {
-      return null;
-    }
-    synchronized (intpGroup) {
-      for (Interpreter intp : intpGroup){
-        if (intp.getClassName().equals(SparkInterpreter.class.getName())) {
-          Interpreter p = intp;
-          while (p instanceof WrappedInterpreter) {
-            p = ((WrappedInterpreter) p).getInnerInterpreter();
-          }
-          return (SparkInterpreter) p;
-        }
-      }
-    }
-    return null;
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    return getSparkInterpreter().getScheduler();
-  }
-
-}
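
The removed DepInterpreter builds the embedded Scala compiler's classpath by joining the URLs of the
context classloader with the entries of java.class.path (see createIMain and currentClassPath above).
Below is a minimal standalone sketch of just that assembly step; the class and method names are
illustrative and not part of the Zeppelin API.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;

public class ClasspathSketch {

  // Collect classpath entries from java.class.path and, when the classloader is a
  // URLClassLoader, from its URLs, then join them with the platform path separator.
  public static String buildClasspath(ClassLoader cl) {
    List<String> entries = new ArrayList<String>();

    for (String cp : System.getProperty("java.class.path").split(File.pathSeparator)) {
      if (!cp.isEmpty()) {
        entries.add(new File(cp).getAbsolutePath());
      }
    }

    if (cl instanceof URLClassLoader) {
      for (URL url : ((URLClassLoader) cl).getURLs()) {
        entries.add(new File(url.getFile()).getAbsolutePath());
      }
    }

    StringBuilder sb = new StringBuilder();
    for (String entry : entries) {
      if (sb.length() > 0) {
        sb.append(File.pathSeparator);
      }
      sb.append(entry);
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(buildClasspath(Thread.currentThread().getContextClassLoader()));
  }
}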

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/PySparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/PySparkInterpreter.java b/spark/src/main/java/com/nflabs/zeppelin/spark/PySparkInterpreter.java
deleted file mode 100644
index f09667d..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/PySparkInterpreter.java
+++ /dev/null
@@ -1,406 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import java.io.BufferedWriter;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.PipedInputStream;
-import java.io.PipedOutputStream;
-import java.net.ServerSocket;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.commons.compress.utils.IOUtils;
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.exec.DefaultExecutor;
-import org.apache.commons.exec.ExecuteException;
-import org.apache.commons.exec.ExecuteResultHandler;
-import org.apache.commons.exec.ExecuteWatchdog;
-import org.apache.commons.exec.PumpStreamHandler;
-import org.apache.commons.exec.environment.EnvironmentUtils;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.SQLContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import py4j.GatewayServer;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterPropertyBuilder;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-import com.nflabs.zeppelin.interpreter.LazyOpenInterpreter;
-import com.nflabs.zeppelin.interpreter.WrappedInterpreter;
-
-/**
- *
- */
-public class PySparkInterpreter extends Interpreter implements ExecuteResultHandler {
-  Logger logger = LoggerFactory.getLogger(PySparkInterpreter.class);
-  private GatewayServer gatewayServer;
-  private DefaultExecutor executor;
-  private int port;
-  private ByteArrayOutputStream outputStream;
-  private ByteArrayOutputStream errStream;
-  private BufferedWriter ins;
-  private PipedInputStream in;
-  private ByteArrayOutputStream input;
-  private String scriptPath;
-  boolean pythonscriptRunning = false;
-
-  static {
-    Interpreter.register(
-        "pyspark",
-        "spark",
-        PySparkInterpreter.class.getName(),
-        new InterpreterPropertyBuilder()
-          .add("spark.home",
-               SparkInterpreter.getSystemDefault("SPARK_HOME", "spark.home", ""),
-               "Spark home path. Should be provided for pyspark")
-          .add("zeppelin.pyspark.python",
-               SparkInterpreter.getSystemDefault("PYSPARK_PYTHON", null, "python"),
-               "Python command to run pyspark with").build());
-  }
-
-  public PySparkInterpreter(Properties property) {
-    super(property);
-
-    scriptPath = System.getProperty("java.io.tmpdir") + "/zeppelin_pyspark.py";
-  }
-
-  private String getSparkHome() {
-    String sparkHome = getProperty("spark.home");
-    if (sparkHome == null) {
-      throw new InterpreterException("spark.home is undefined");
-    } else {
-      return sparkHome;
-    }
-  }
-
-
-  private void createPythonScript() {
-    ClassLoader classLoader = getClass().getClassLoader();
-    File out = new File(scriptPath);
-
-    if (out.exists() && out.isDirectory()) {
-      throw new InterpreterException("Can't create python script " + out.getAbsolutePath());
-    }
-
-    try {
-      FileOutputStream outStream = new FileOutputStream(out);
-      IOUtils.copy(
-          classLoader.getResourceAsStream("python/zeppelin_pyspark.py"),
-          outStream);
-      outStream.close();
-    } catch (IOException e) {
-      throw new InterpreterException(e);
-    }
-
-    logger.info("File {} created", scriptPath);
-  }
-
-  @Override
-  public void open() {
-    // create python script
-    createPythonScript();
-
-    port = findRandomOpenPortOnAllLocalInterfaces();
-
-    gatewayServer = new GatewayServer(this, port);
-    gatewayServer.start();
-
-    // Run python shell
-    CommandLine cmd = CommandLine.parse(getProperty("zeppelin.pyspark.python"));
-    cmd.addArgument(scriptPath, false);
-    cmd.addArgument(Integer.toString(port), false);
-    executor = new DefaultExecutor();
-    outputStream = new ByteArrayOutputStream();
-    PipedOutputStream ps = new PipedOutputStream();
-    in = null;
-    try {
-      in = new PipedInputStream(ps);
-    } catch (IOException e1) {
-      throw new InterpreterException(e1);
-    }
-    ins = new BufferedWriter(new OutputStreamWriter(ps));
-
-    input = new ByteArrayOutputStream();
-
-    PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream, outputStream, in);
-    executor.setStreamHandler(streamHandler);
-    executor.setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT));
-
-
-    try {
-      Map env = EnvironmentUtils.getProcEnvironment();
-
-      String pythonPath = (String) env.get("PYTHONPATH");
-      if (pythonPath == null) {
-        pythonPath = "";
-      } else {
-        pythonPath += ":";
-      }
-
-      pythonPath += getSparkHome() + "/python/lib/py4j-0.8.2.1-src.zip:"
-          + getSparkHome() + "/python";
-
-      env.put("PYTHONPATH", pythonPath);
-
-      executor.execute(cmd, env, this);
-      pythonscriptRunning = true;
-    } catch (IOException e) {
-      throw new InterpreterException(e);
-    }
-
-
-    try {
-      input.write("import sys, getopt\n".getBytes());
-      ins.flush();
-    } catch (IOException e) {
-      throw new InterpreterException(e);
-    }
-  }
-
-  private int findRandomOpenPortOnAllLocalInterfaces() {
-    int port;
-    try (ServerSocket socket = new ServerSocket(0);) {
-      port = socket.getLocalPort();
-      socket.close();
-    } catch (IOException e) {
-      throw new InterpreterException(e);
-    }
-    return port;
-  }
-
-  @Override
-  public void close() {
-    executor.getWatchdog().destroyProcess();
-    gatewayServer.shutdown();
-  }
-
-  PythonInterpretRequest pythonInterpretRequest = null;
-
-  /**
-   *
-   */
-  public class PythonInterpretRequest {
-    public String statements;
-    public String jobGroup;
-
-    public PythonInterpretRequest(String statements, String jobGroup) {
-      this.statements = statements;
-      this.jobGroup = jobGroup;
-    }
-
-    public String statements() {
-      return statements;
-    }
-
-    public String jobGroup() {
-      return jobGroup;
-    }
-  }
-
-  Integer statementSetNotifier = new Integer(0);
-
-  public PythonInterpretRequest getStatements() {
-    synchronized (statementSetNotifier) {
-      while (pythonInterpretRequest == null) {
-        try {
-          statementSetNotifier.wait(1000);
-        } catch (InterruptedException e) {
-        }
-      }
-      PythonInterpretRequest req = pythonInterpretRequest;
-      pythonInterpretRequest = null;
-      return req;
-    }
-  }
-
-  String statementOutput = null;
-  boolean statementError = false;
-  Integer statementFinishedNotifier = new Integer(0);
-
-  public void setStatementsFinished(String out, boolean error) {
-    synchronized (statementFinishedNotifier) {
-      statementOutput = out;
-      statementError = error;
-      statementFinishedNotifier.notify();
-    }
-
-  }
-
-  boolean pythonScriptInitialized = false;
-  Integer pythonScriptInitializeNotifier = new Integer(0);
-
-  public void onPythonScriptInitialized() {
-    synchronized (pythonScriptInitializeNotifier) {
-      pythonScriptInitialized = true;
-      pythonScriptInitializeNotifier.notifyAll();
-    }
-  }
-
-  @Override
-  public InterpreterResult interpret(String st, InterpreterContext context) {
-    if (!pythonscriptRunning) {
-      return new InterpreterResult(Code.ERROR, "python process not running"
-          + outputStream.toString());
-    }
-
-    outputStream.reset();
-
-    synchronized (pythonScriptInitializeNotifier) {
-      long startTime = System.currentTimeMillis();
-      while (pythonScriptInitialized == false
-          && pythonscriptRunning
-          && System.currentTimeMillis() - startTime < 10 * 1000) {
-        try {
-          pythonScriptInitializeNotifier.wait(1000);
-        } catch (InterruptedException e) {
-        }
-      }
-    }
-
-    if (pythonscriptRunning == false) {
-      // python script failed to initialize and terminated
-      return new InterpreterResult(Code.ERROR, "failed to start pyspark"
-          + outputStream.toString());
-    }
-    if (pythonScriptInitialized == false) {
-      // timeout. didn't get initialized message
-      return new InterpreterResult(Code.ERROR, "pyspark is not responding "
-          + outputStream.toString());
-    }
-
-    SparkInterpreter sparkInterpreter = getSparkInterpreter();
-    if (!sparkInterpreter.getSparkContext().version().startsWith("1.2") &&
-        !sparkInterpreter.getSparkContext().version().startsWith("1.3")) {
-      return new InterpreterResult(Code.ERROR, "pyspark "
-          + sparkInterpreter.getSparkContext().version() + " is not supported");
-    }
-    String jobGroup = sparkInterpreter.getJobGroup(context);
-    ZeppelinContext z = sparkInterpreter.getZeppelinContext();
-    z.setInterpreterContext(context);
-    z.setGui(context.getGui());
-    pythonInterpretRequest = new PythonInterpretRequest(st, jobGroup);
-    statementOutput = null;
-
-    synchronized (statementSetNotifier) {
-      statementSetNotifier.notify();
-    }
-
-    synchronized (statementFinishedNotifier) {
-      while (statementOutput == null) {
-        try {
-          statementFinishedNotifier.wait(1000);
-        } catch (InterruptedException e) {
-        }
-      }
-    }
-
-    if (statementError) {
-      return new InterpreterResult(Code.ERROR, statementOutput);
-    } else {
-      return new InterpreterResult(Code.SUCCESS, statementOutput);
-    }
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {
-    SparkInterpreter sparkInterpreter = getSparkInterpreter();
-    sparkInterpreter.cancel(context);
-  }
-
-  @Override
-  public FormType getFormType() {
-    return FormType.NATIVE;
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    SparkInterpreter sparkInterpreter = getSparkInterpreter();
-    return sparkInterpreter.getProgress(context);
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    // not supported
-    return new LinkedList<String>();
-  }
-
-  private SparkInterpreter getSparkInterpreter() {
-    InterpreterGroup intpGroup = getInterpreterGroup();
-    synchronized (intpGroup) {
-      for (Interpreter intp : getInterpreterGroup()){
-        if (intp.getClassName().equals(SparkInterpreter.class.getName())) {
-          Interpreter p = intp;
-          while (p instanceof WrappedInterpreter) {
-            if (p instanceof LazyOpenInterpreter) {
-              ((LazyOpenInterpreter) p).open();
-            }
-            p = ((WrappedInterpreter) p).getInnerInterpreter();
-          }
-          return (SparkInterpreter) p;
-        }
-      }
-    }
-    return null;
-  }
-
-  public ZeppelinContext getZeppelinContext() {
-    SparkInterpreter sparkIntp = getSparkInterpreter();
-    if (sparkIntp != null) {
-      return getSparkInterpreter().getZeppelinContext();
-    } else {
-      return null;
-    }
-  }
-
-  public JavaSparkContext getJavaSparkContext() {
-    SparkInterpreter intp = getSparkInterpreter();
-    if (intp == null) {
-      return null;
-    } else {
-      return new JavaSparkContext(intp.getSparkContext());
-    }
-  }
-
-  public SparkConf getSparkConf() {
-    JavaSparkContext sc = getJavaSparkContext();
-    if (sc == null) {
-      return null;
-    } else {
-      return getJavaSparkContext().getConf();
-    }
-  }
-
-  public SQLContext getSQLContext() {
-    SparkInterpreter intp = getSparkInterpreter();
-    if (intp == null) {
-      return null;
-    } else {
-      return intp.getSQLContext();
-    }
-  }
-
-
-  @Override
-  public void onProcessComplete(int exitValue) {
-    pythonscriptRunning = false;
-    logger.info("python process terminated. exit code " + exitValue);
-  }
-
-  @Override
-  public void onProcessFailed(ExecuteException e) {
-    pythonscriptRunning = false;
-    logger.error("python process failed", e);
-  }
-}
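
The removed PySparkInterpreter hands statements to the Python process through a wait/notify handoff
(interpret publishes a request, getStatements takes it, setStatementsFinished delivers the output).
The sketch below isolates that handshake pattern with plain Java threads; StatementBridge, submit,
take and finish are illustrative names, not Zeppelin classes.

public class StatementBridge {
  private String pendingStatement;            // set by the submitting thread
  private String result;                      // set by the executing thread
  private final Object requestLock = new Object();
  private final Object resultLock = new Object();

  // Called by the interpreter thread: publish a statement and wait for its output.
  public String submit(String statement) throws InterruptedException {
    synchronized (requestLock) {
      pendingStatement = statement;
      requestLock.notify();
    }
    synchronized (resultLock) {
      while (result == null) {
        resultLock.wait(1000);
      }
      String out = result;
      result = null;
      return out;
    }
  }

  // Called by the worker (the Python gateway in the real code): take the next statement.
  public String take() throws InterruptedException {
    synchronized (requestLock) {
      while (pendingStatement == null) {
        requestLock.wait(1000);
      }
      String st = pendingStatement;
      pendingStatement = null;
      return st;
    }
  }

  // Called by the worker when execution has finished.
  public void finish(String output) {
    synchronized (resultLock) {
      result = output;
      resultLock.notify();
    }
  }

  public static void main(String[] args) throws Exception {
    final StatementBridge bridge = new StatementBridge();
    Thread worker = new Thread(new Runnable() {
      public void run() {
        try {
          bridge.finish("echo: " + bridge.take());
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      }
    });
    worker.start();
    System.out.println(bridge.submit("print(1 + 1)"));
  }
}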


[05/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/NotebookServer.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/NotebookServer.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/NotebookServer.java
deleted file mode 100644
index fd9960b..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/NotebookServer.java
+++ /dev/null
@@ -1,483 +0,0 @@
-package com.nflabs.zeppelin.socket;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import org.java_websocket.WebSocket;
-import org.java_websocket.handshake.ClientHandshake;
-import org.java_websocket.server.WebSocketServer;
-import org.quartz.SchedulerException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Strings;
-import com.google.gson.Gson;
-import com.nflabs.zeppelin.notebook.JobListenerFactory;
-import com.nflabs.zeppelin.notebook.Note;
-import com.nflabs.zeppelin.notebook.Notebook;
-import com.nflabs.zeppelin.notebook.Paragraph;
-import com.nflabs.zeppelin.scheduler.Job;
-import com.nflabs.zeppelin.scheduler.Job.Status;
-import com.nflabs.zeppelin.scheduler.JobListener;
-import com.nflabs.zeppelin.server.ZeppelinServer;
-import com.nflabs.zeppelin.socket.Message.OP;
-
-/**
- * Zeppelin websocket service.
- *
- * @author anthonycorbacho
- */
-public class NotebookServer extends WebSocketServer implements JobListenerFactory {
-
-  private static final Logger LOG = LoggerFactory.getLogger(NotebookServer.class);
-  private static final int DEFAULT_PORT = 8282;
-
-  private static void creatingwebSocketServerLog(int port) {
-    LOG.info("Create zeppelin websocket on port {}", port);
-  }
-
-  Gson gson = new Gson();
-  Map<String, List<WebSocket>> noteSocketMap = new HashMap<String, List<WebSocket>>();
-  List<WebSocket> connectedSockets = new LinkedList<WebSocket>();
-
-  public NotebookServer() {
-    super(new InetSocketAddress(DEFAULT_PORT));
-    creatingwebSocketServerLog(DEFAULT_PORT);
-  }
-
-  public NotebookServer(int port) {
-    super(new InetSocketAddress(port));
-    creatingwebSocketServerLog(port);
-  }
-
-  private Notebook notebook() {
-    return ZeppelinServer.notebook;
-  }
-
-  @Override
-  public void onOpen(WebSocket conn, ClientHandshake handshake) {
-    LOG.info("New connection from {} : {}", conn.getRemoteSocketAddress().getHostName(), conn
-        .getRemoteSocketAddress().getPort());
-    synchronized (connectedSockets) {
-      connectedSockets.add(conn);
-    }
-  }
-
-  @Override
-  public void onMessage(WebSocket conn, String msg) {
-    Notebook notebook = notebook();
-    try {
-      Message messagereceived = deserializeMessage(msg);
-      LOG.info("RECEIVE << " + messagereceived.op);
-      /** Lets be elegant here */
-      switch (messagereceived.op) {
-          case LIST_NOTES:
-            broadcastNoteList();
-            break;
-          case GET_NOTE:
-            sendNote(conn, notebook, messagereceived);
-            break;
-          case NEW_NOTE:
-            createNote(conn, notebook);
-            break;
-          case DEL_NOTE:
-            removeNote(conn, notebook, messagereceived);
-            break;
-          case COMMIT_PARAGRAPH:
-            updateParagraph(conn, notebook, messagereceived);
-            break;
-          case RUN_PARAGRAPH:
-            runParagraph(conn, notebook, messagereceived);
-            break;
-          case CANCEL_PARAGRAPH:
-            cancelParagraph(conn, notebook, messagereceived);
-            break;
-          case MOVE_PARAGRAPH:
-            moveParagraph(conn, notebook, messagereceived);
-            break;
-          case INSERT_PARAGRAPH:
-            insertParagraph(conn, notebook, messagereceived);
-            break;
-          case PARAGRAPH_REMOVE:
-            removeParagraph(conn, notebook, messagereceived);
-            break;
-          case NOTE_UPDATE:
-            updateNote(conn, notebook, messagereceived);
-            break;
-          case COMPLETION:
-            completion(conn, notebook, messagereceived);
-            break;
-          default:
-            broadcastNoteList();
-            break;
-      }
-    } catch (Exception e) {
-      LOG.error("Can't handle message", e);
-    }
-  }
-
-  @Override
-  public void onClose(WebSocket conn, int code, String reason, boolean remote) {
-    LOG.info("Closed connection to {} : {}", conn.getRemoteSocketAddress().getHostName(), conn
-        .getRemoteSocketAddress().getPort());
-    removeConnectionFromAllNote(conn);
-    synchronized (connectedSockets) {
-      connectedSockets.remove(conn);
-    }
-  }
-
-  @Override
-  public void onError(WebSocket conn, Exception message) {
-    removeConnectionFromAllNote(conn);
-    synchronized (connectedSockets) {
-      connectedSockets.remove(conn);
-    }
-  }
-
-  private Message deserializeMessage(String msg) {
-    Message m = gson.fromJson(msg, Message.class);
-    return m;
-  }
-
-  private String serializeMessage(Message m) {
-    return gson.toJson(m);
-  }
-
-  private void addConnectionToNote(String noteId, WebSocket socket) {
-    synchronized (noteSocketMap) {
-      removeConnectionFromAllNote(socket); // make sure a socket relates only a single note.
-      List<WebSocket> socketList = noteSocketMap.get(noteId);
-      if (socketList == null) {
-        socketList = new LinkedList<WebSocket>();
-        noteSocketMap.put(noteId, socketList);
-      }
-
-      if (socketList.contains(socket) == false) {
-        socketList.add(socket);
-      }
-    }
-  }
-
-  private void removeConnectionFromNote(String noteId, WebSocket socket) {
-    synchronized (noteSocketMap) {
-      List<WebSocket> socketList = noteSocketMap.get(noteId);
-      if (socketList != null) {
-        socketList.remove(socket);
-      }
-    }
-  }
-
-  private void removeNote(String noteId) {
-    synchronized (noteSocketMap) {
-      List<WebSocket> socketList = noteSocketMap.remove(noteId);
-    }
-  }
-
-  private void removeConnectionFromAllNote(WebSocket socket) {
-    synchronized (noteSocketMap) {
-      Set<String> keys = noteSocketMap.keySet();
-      for (String noteId : keys) {
-        removeConnectionFromNote(noteId, socket);
-      }
-    }
-  }
-
-  private String getOpenNoteId(WebSocket socket) {
-    String id = null;
-    synchronized (noteSocketMap) {
-      Set<String> keys = noteSocketMap.keySet();
-      for (String noteId : keys) {
-        List<WebSocket> sockets = noteSocketMap.get(noteId);
-        if (sockets.contains(socket)) {
-          id = noteId;
-        }
-      }
-    }
-    return id;
-  }
-
-  private void broadcast(String noteId, Message m) {
-    LOG.info("SEND >> " + m.op);
-    synchronized (noteSocketMap) {
-      List<WebSocket> socketLists = noteSocketMap.get(noteId);
-      if (socketLists == null || socketLists.size() == 0) {
-        return;
-      }
-      for (WebSocket conn : socketLists) {
-        conn.send(serializeMessage(m));
-      }
-    }
-  }
-
-  private void broadcastAll(Message m) {
-    synchronized (connectedSockets) {
-      for (WebSocket conn : connectedSockets) {
-        conn.send(serializeMessage(m));
-      }
-    }
-  }
-
-  private void broadcastNote(Note note) {
-    broadcast(note.id(), new Message(OP.NOTE).put("note", note));
-  }
-
-  private void broadcastNoteList() {
-    Notebook notebook = notebook();
-    List<Note> notes = notebook.getAllNotes();
-    List<Map<String, String>> notesInfo = new LinkedList<Map<String, String>>();
-    for (Note note : notes) {
-      Map<String, String> info = new HashMap<String, String>();
-      info.put("id", note.id());
-      info.put("name", note.getName());
-      notesInfo.add(info);
-    }
-    broadcastAll(new Message(OP.NOTES_INFO).put("notes", notesInfo));
-  }
-
-  private void sendNote(WebSocket conn, Notebook notebook, Message fromMessage) {
-    String noteId = (String) fromMessage.get("id");
-    if (noteId == null) {
-      return;
-    }
-    Note note = notebook.getNote(noteId);
-    if (note != null) {
-      addConnectionToNote(note.id(), conn);
-      conn.send(serializeMessage(new Message(OP.NOTE).put("note", note)));
-    }
-  }
-
-  private void updateNote(WebSocket conn, Notebook notebook, Message fromMessage)
-      throws SchedulerException, IOException {
-    String noteId = (String) fromMessage.get("id");
-    String name = (String) fromMessage.get("name");
-    Map<String, Object> config = (Map<String, Object>) fromMessage.get("config");
-    if (noteId == null) {
-      return;
-    }
-    if (config == null) {
-      return;
-    }
-    Note note = notebook.getNote(noteId);
-    if (note != null) {
-      boolean cronUpdated = isCronUpdated(config, note.getConfig());
-      note.setName(name);
-      note.setConfig(config);
-
-      if (cronUpdated) {
-        notebook.refreshCron(note.id());
-      }
-      note.persist();
-
-      broadcastNote(note);
-      broadcastNoteList();
-    }
-  }
-
-  private boolean isCronUpdated(Map<String, Object> configA, Map<String, Object> configB) {
-    boolean cronUpdated = false;
-    if (configA.get("cron") != null && configB.get("cron") != null
-        && configA.get("cron").equals(configB.get("cron"))) {
-      cronUpdated = true;
-    } else if (configA.get("cron") == null && configB.get("cron") == null) {
-      cronUpdated = false;
-    } else if (configA.get("cron") != null || configB.get("cron") != null) {
-      cronUpdated = true;
-    }
-    return cronUpdated;
-  }
-
-  private void createNote(WebSocket conn, Notebook notebook) throws IOException {
-    Note note = notebook.createNote();
-    note.addParagraph(); // it's an empty note. so add one paragraph
-    note.persist();
-    broadcastNote(note);
-    broadcastNoteList();
-  }
-
-  private void removeNote(WebSocket conn, Notebook notebook, Message fromMessage)
-      throws IOException {
-    String noteId = (String) fromMessage.get("id");
-    if (noteId == null) {
-      return;
-    }
-    Note note = notebook.getNote(noteId);
-    note.unpersist();
-    notebook.removeNote(noteId);
-    removeNote(noteId);
-    broadcastNoteList();
-  }
-
-  private void updateParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
-      throws IOException {
-    String paragraphId = (String) fromMessage.get("id");
-    if (paragraphId == null) {
-      return;
-    }
-    Map<String, Object> params = (Map<String, Object>) fromMessage.get("params");
-    Map<String, Object> config = (Map<String, Object>) fromMessage.get("config");
-    final Note note = notebook.getNote(getOpenNoteId(conn));
-    Paragraph p = note.getParagraph(paragraphId);
-    p.settings.setParams(params);
-    p.setConfig(config);
-    p.setTitle((String) fromMessage.get("title"));
-    p.setText((String) fromMessage.get("paragraph"));
-    note.persist();
-    broadcast(note.id(), new Message(OP.PARAGRAPH).put("paragraph", p));
-  }
-
-  private void removeParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
-      throws IOException {
-    final String paragraphId = (String) fromMessage.get("id");
-    if (paragraphId == null) {
-      return;
-    }
-    final Note note = notebook.getNote(getOpenNoteId(conn));
-    /** We dont want to remove the last paragraph */
-    if (!note.isLastParagraph(paragraphId)) {
-      note.removeParagraph(paragraphId);
-      note.persist();
-      broadcastNote(note);
-    }
-  }
-
-  private void completion(WebSocket conn, Notebook notebook, Message fromMessage) {
-    String paragraphId = (String) fromMessage.get("id");
-    String buffer = (String) fromMessage.get("buf");
-    int cursor = (int) Double.parseDouble(fromMessage.get("cursor").toString());
-    Message resp = new Message(OP.COMPLETION_LIST).put("id", paragraphId);
-
-    if (paragraphId == null) {
-      conn.send(serializeMessage(resp));
-      return;
-    }
-
-    final Note note = notebook.getNote(getOpenNoteId(conn));
-    List<String> candidates = note.completion(paragraphId, buffer, cursor);
-    resp.put("completions", candidates);
-    conn.send(serializeMessage(resp));
-  }
-
-  private void moveParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
-      throws IOException {
-    final String paragraphId = (String) fromMessage.get("id");
-    if (paragraphId == null) {
-      return;
-    }
-
-    final int newIndex = (int) Double.parseDouble(fromMessage.get("index").toString());
-    final Note note = notebook.getNote(getOpenNoteId(conn));
-    note.moveParagraph(paragraphId, newIndex);
-    note.persist();
-    broadcastNote(note);
-  }
-
-  private void insertParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
-      throws IOException {
-    final int index = (int) Double.parseDouble(fromMessage.get("index").toString());
-
-    final Note note = notebook.getNote(getOpenNoteId(conn));
-    note.insertParagraph(index);
-    note.persist();
-    broadcastNote(note);
-  }
-
-
-  private void cancelParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
-      throws IOException {
-    final String paragraphId = (String) fromMessage.get("id");
-    if (paragraphId == null) {
-      return;
-    }
-
-    final Note note = notebook.getNote(getOpenNoteId(conn));
-    Paragraph p = note.getParagraph(paragraphId);
-    p.abort();
-  }
-
-  private void runParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
-      throws IOException {
-    final String paragraphId = (String) fromMessage.get("id");
-    if (paragraphId == null) {
-      return;
-    }
-    final Note note = notebook.getNote(getOpenNoteId(conn));
-    Paragraph p = note.getParagraph(paragraphId);
-    String text = (String) fromMessage.get("paragraph");
-    p.setText(text);
-    p.setTitle((String) fromMessage.get("title"));
-    Map<String, Object> params = (Map<String, Object>) fromMessage.get("params");
-    p.settings.setParams(params);
-    Map<String, Object> config = (Map<String, Object>) fromMessage.get("config");
-    p.setConfig(config);
-
-    // if it's the last paragraph, let's add a new one
-    boolean isTheLastParagraph = note.getLastParagraph().getId().equals(p.getId());
-    if (!Strings.isNullOrEmpty(text) && isTheLastParagraph) {
-      note.addParagraph();
-    }
-    note.persist();
-    broadcastNote(note);
-
-    try {
-      note.run(paragraphId);
-    }
-    catch (Exception ex) {
-      LOG.error("Exception from run", ex);
-      if (p != null) {
-        p.setReturn(new InterpreterResult(
-          InterpreterResult.Code.ERROR, ex.getMessage()), ex);
-        p.setStatus(Status.ERROR);
-      }
-    }
-  }
-
-  /**
-   * Need description here.
-   *
-   */
-  public static class ParagraphJobListener implements JobListener {
-    private NotebookServer notebookServer;
-    private Note note;
-
-    public ParagraphJobListener(NotebookServer notebookServer, Note note) {
-      this.notebookServer = notebookServer;
-      this.note = note;
-    }
-
-    @Override
-    public void onProgressUpdate(Job job, int progress) {
-      notebookServer.broadcast(note.id(),
-          new Message(OP.PROGRESS).put("id", job.getId()).put("progress", job.progress()));
-    }
-
-    @Override
-    public void beforeStatusChange(Job job, Status before, Status after) {}
-
-    @Override
-    public void afterStatusChange(Job job, Status before, Status after) {
-      if (after == Status.ERROR) {
-        job.getException().printStackTrace();
-      }
-      if (job.isTerminated()) {
-        LOG.info("Job {} is finished", job.getId());
-        try {
-          note.persist();
-        } catch (IOException e) {
-          e.printStackTrace();
-        }
-      }
-      notebookServer.broadcastNote(note);
-    }
-  }
-
-  @Override
-  public JobListener getParagraphJobListener(Note note) {
-    return new ParagraphJobListener(this, note);
-  }
-}
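
The removed NotebookServer keys its broadcasts on a noteId-to-connections map, guarded by
synchronized blocks so that each socket watches at most one note. A compact sketch of that
bookkeeping follows, with a plain listener interface standing in for org.java_websocket.WebSocket;
all names are illustrative.

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class NoteBroadcastSketch {

  /** Stand-in for a websocket connection. */
  public interface Listener {
    void send(String message);
  }

  private final Map<String, List<Listener>> noteListeners =
      new HashMap<String, List<Listener>>();

  // Attach a listener to a note, ensuring it watches only one note at a time.
  public void watch(String noteId, Listener listener) {
    synchronized (noteListeners) {
      for (List<Listener> listeners : noteListeners.values()) {
        listeners.remove(listener);
      }
      List<Listener> listeners = noteListeners.get(noteId);
      if (listeners == null) {
        listeners = new LinkedList<Listener>();
        noteListeners.put(noteId, listeners);
      }
      if (!listeners.contains(listener)) {
        listeners.add(listener);
      }
    }
  }

  // Send a serialized message to every listener of a note.
  public void broadcast(String noteId, String message) {
    synchronized (noteListeners) {
      List<Listener> listeners = noteListeners.get(noteId);
      if (listeners == null) {
        return;
      }
      for (Listener listener : listeners) {
        listener.send(message);
      }
    }
  }

  public static void main(String[] args) {
    NoteBroadcastSketch server = new NoteBroadcastSketch();
    Listener console = new Listener() {
      public void send(String message) {
        System.out.println("received: " + message);
      }
    };
    server.watch("noteA", console);          // "noteA" is a placeholder id
    server.broadcast("noteA", "{\"op\":\"NOTE\"}");
  }
}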

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/SslWebSocketServerFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/SslWebSocketServerFactory.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/SslWebSocketServerFactory.java
deleted file mode 100644
index b84e1d0..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/SslWebSocketServerFactory.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package com.nflabs.zeppelin.socket;
-
-import org.java_websocket.server.DefaultSSLWebSocketServerFactory;
-import org.java_websocket.SSLSocketChannel2;
-
-import java.io.IOException;
-
-import java.nio.channels.ByteChannel;
-import java.nio.channels.SelectionKey;
-import java.nio.channels.SocketChannel;
-
-import java.util.concurrent.ExecutorService;
-
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLEngine;
-
-/**
- * Extension of the java_websocket library's DefaultSslWebSocketServerFactory
- * to require client side authentication during the SSL handshake
- */
-public class SslWebSocketServerFactory 
-    extends DefaultSSLWebSocketServerFactory {
-
-  protected boolean needClientAuth;
-
-  public SslWebSocketServerFactory(SSLContext sslcontext) {
-    super(sslcontext);
-    initAttributes();
-  }
-
-  public SslWebSocketServerFactory(
-      SSLContext sslcontext,
-      ExecutorService exec) {
-
-    super(sslcontext, exec);
-    initAttributes();
-  }
-
-  protected void initAttributes() {
-    this.needClientAuth = false;
-  }
-
-  @Override
-  public ByteChannel wrapChannel(SocketChannel channel, SelectionKey key)
-      throws IOException {
-
-    SSLEngine sslEngine = sslcontext.createSSLEngine();
-    sslEngine.setUseClientMode(false);
-    sslEngine.setNeedClientAuth(needClientAuth);
-    return new SSLSocketChannel2( channel, sslEngine, exec, key );
-  }
-
-  public boolean getNeedClientAuth() {
-    return needClientAuth;
-  }
-
-  public void setNeedClientAuth(boolean needClientAuth) {
-    this.needClientAuth = needClientAuth;
-  }
-}
-
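
The only behavioral change in the removed factory is flipping needClientAuth on the SSLEngine it
hands to the websocket layer. The sketch below shows the same engine setup in isolation; the
keystore/SSLContext wiring that Zeppelin performs elsewhere is out of scope here, so the JVM default
context is used purely for illustration.

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;

public class MutualTlsEngineSketch {

  // Build a server-side SSLEngine that requires the client to present a certificate,
  // mirroring what the removed wrapChannel() override does.
  public static SSLEngine serverEngine(SSLContext sslContext, boolean needClientAuth) {
    SSLEngine engine = sslContext.createSSLEngine();
    engine.setUseClientMode(false);           // act as the server in the handshake
    engine.setNeedClientAuth(needClientAuth); // reject clients without a certificate
    return engine;
  }

  public static void main(String[] args) throws Exception {
    // A real deployment would load its own keystore instead of the default context.
    SSLEngine engine = serverEngine(SSLContext.getDefault(), true);
    System.out.println("needClientAuth=" + engine.getNeedClientAuth());
  }
}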

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/rest/InterpreterRestApi.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/InterpreterRestApi.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/InterpreterRestApi.java
new file mode 100644
index 0000000..1e2ade6
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/InterpreterRestApi.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.rest;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterFactory;
+import org.apache.zeppelin.interpreter.InterpreterSetting;
+import org.apache.zeppelin.interpreter.Interpreter.RegisteredInterpreter;
+import org.apache.zeppelin.rest.message.NewInterpreterSettingRequest;
+import org.apache.zeppelin.rest.message.UpdateInterpreterSettingRequest;
+import org.apache.zeppelin.server.JsonResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.wordnik.swagger.annotations.Api;
+import com.wordnik.swagger.annotations.ApiOperation;
+import com.wordnik.swagger.annotations.ApiResponse;
+import com.wordnik.swagger.annotations.ApiResponses;
+
+/**
+ * Interpreter REST API.
+ *
+ */
+@Path("/interpreter")
+@Produces("application/json")
+@Api(value = "/interpreter", description = "Zeppelin Interpreter REST API")
+public class InterpreterRestApi {
+  Logger logger = LoggerFactory.getLogger(InterpreterRestApi.class);
+
+  private InterpreterFactory interpreterFactory;
+
+  Gson gson = new Gson();
+
+  public InterpreterRestApi() {
+
+  }
+
+  public InterpreterRestApi(InterpreterFactory interpreterFactory) {
+    this.interpreterFactory = interpreterFactory;
+  }
+
+  /**
+   * List all interpreter settings.
+   * @return the list of interpreter settings
+   */
+  @GET
+  @Path("setting")
+  @ApiOperation(httpMethod = "GET", value = "List all interpreter setting")
+  @ApiResponses(value = {@ApiResponse(code = 500, message = "When something goes wrong")})
+  public Response listSettings() {
+    List<InterpreterSetting> interpreterSettings = null;
+    interpreterSettings = interpreterFactory.get();
+    return new JsonResponse(Status.OK, "", interpreterSettings).build();
+  }
+
+  /**
+   * Add new interpreter setting
+   * @param message
+   * @return
+   * @throws IOException
+   * @throws InterpreterException
+   */
+  @POST
+  @Path("setting")
+  @ApiOperation(httpMethod = "GET", value = "Create new interpreter setting")
+  @ApiResponses(value = {@ApiResponse(code = 201, message = "On success")})
+  public Response newSettings(String message) throws InterpreterException, IOException {
+    NewInterpreterSettingRequest request = gson.fromJson(message,
+        NewInterpreterSettingRequest.class);
+    Properties p = new Properties();
+    p.putAll(request.getProperties());
+    interpreterFactory.add(request.getName(), request.getGroup(), request.getOption(), p);
+    return new JsonResponse(Status.CREATED, "").build();
+  }
+
+  @PUT
+  @Path("setting/{settingId}")
+  public Response updateSetting(String message, @PathParam("settingId") String settingId) {
+    logger.info("Update interpreterSetting {}", settingId);
+
+    try {
+      UpdateInterpreterSettingRequest p = gson.fromJson(message,
+          UpdateInterpreterSettingRequest.class);
+      interpreterFactory.setPropertyAndRestart(settingId, p.getOption(), p.getProperties());
+    } catch (InterpreterException e) {
+      return new JsonResponse(Status.NOT_FOUND, e.getMessage(), e).build();
+    } catch (IOException e) {
+      return new JsonResponse(Status.INTERNAL_SERVER_ERROR, e.getMessage(), e).build();
+    }
+    InterpreterSetting setting = interpreterFactory.get(settingId);
+    if (setting == null) {
+      return new JsonResponse(Status.NOT_FOUND, "", settingId).build();
+    }
+    return new JsonResponse(Status.OK, "", setting).build();
+  }
+
+  @DELETE
+  @Path("setting/{settingId}")
+  @ApiOperation(httpMethod = "GET", value = "Remove interpreter setting")
+  @ApiResponses(value = {@ApiResponse(code = 500, message = "When something goes wrong")})
+  public Response removeSetting(@PathParam("settingId") String settingId) throws IOException {
+    logger.info("Remove interpreterSetting {}", settingId);
+    interpreterFactory.remove(settingId);
+    return new JsonResponse(Status.OK).build();
+  }
+
+  @PUT
+  @Path("setting/restart/{settingId}")
+  @ApiOperation(httpMethod = "GET", value = "restart interpreter setting")
+  @ApiResponses(value = {
+      @ApiResponse(code = 404, message = "Not found")})
+  public Response restartSetting(@PathParam("settingId") String settingId) {
+    logger.info("Restart interpreterSetting {}", settingId);
+    try {
+      interpreterFactory.restart(settingId);
+    } catch (InterpreterException e) {
+      return new JsonResponse(Status.NOT_FOUND, e.getMessage(), e).build();
+    }
+    InterpreterSetting setting = interpreterFactory.get(settingId);
+    if (setting == null) {
+      return new JsonResponse(Status.NOT_FOUND, "", settingId).build();
+    }
+    return new JsonResponse(Status.OK, "", setting).build();
+  }
+
+  /**
+   * List all available interpreters by group
+   */
+  @GET
+  @ApiOperation(httpMethod = "GET", value = "List all available interpreters")
+  @ApiResponses(value = {
+      @ApiResponse(code = 500, message = "When something goes wrong")})
+  public Response listInterpreter(String message) {
+    Map<String, RegisteredInterpreter> m = Interpreter.registeredInterpreters;
+    return new JsonResponse(Status.OK, "", m).build();
+  }
+}
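
As a usage sketch for the new endpoint above, the listSettings() resource can be exercised with a
plain HTTP GET. The base URL below is an assumption for illustration; adjust host, port and the
/api prefix to match how the Zeppelin server actually mounts its REST context.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class ListInterpreterSettingsSketch {

  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:8080/api/interpreter/setting"); // assumed base URL
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");

    int status = conn.getResponseCode();
    BufferedReader reader =
        new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"));
    StringBuilder body = new StringBuilder();
    String line;
    while ((line = reader.readLine()) != null) {
      body.append(line);
    }
    reader.close();
    conn.disconnect();

    // The endpoint wraps the settings list in a JsonResponse envelope.
    System.out.println(status + " " + body);
  }
}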

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/rest/NotebookResponse.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/NotebookResponse.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/NotebookResponse.java
new file mode 100644
index 0000000..1397ac1
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/NotebookResponse.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.rest;
+
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * Response wrapper.
+ *
+ * @author anthonycorbacho
+ *
+ */
+@XmlRootElement
+public class NotebookResponse {
+  private String msg;
+
+  public NotebookResponse() {}
+
+  public NotebookResponse(String msg) {
+    this.msg = msg;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/rest/NotebookRestApi.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/NotebookRestApi.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/NotebookRestApi.java
new file mode 100644
index 0000000..8a933f7
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/NotebookRestApi.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.rest;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+
+import org.apache.zeppelin.interpreter.InterpreterSetting;
+import org.apache.zeppelin.notebook.Notebook;
+import org.apache.zeppelin.rest.message.InterpreterSettingListForNoteBind;
+import org.apache.zeppelin.server.JsonResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+
+/**
+ * REST API endpoint for the notebook.
+ */
+@Path("/notebook")
+@Produces("application/json")
+public class NotebookRestApi {
+  Logger logger = LoggerFactory.getLogger(NotebookRestApi.class);
+  Gson gson = new Gson();
+  private Notebook notebook;
+
+  public NotebookRestApi() {}
+
+  public NotebookRestApi(Notebook notebook) {
+    this.notebook = notebook;
+  }
+
+  /**
+   * Bind interpreter settings to a note.
+   * @throws IOException
+   */
+  @PUT
+  @Path("interpreter/bind/{noteId}")
+  public Response bind(@PathParam("noteId") String noteId, String req) throws IOException {
+    List<String> settingIdList = gson.fromJson(req, new TypeToken<List<String>>(){}.getType());
+    notebook.bindInterpretersToNote(noteId, settingIdList);
+    return new JsonResponse(Status.OK).build();
+  }
+
+  /**
+   * List the interpreter settings bound to a note, along with the other available settings.
+   */
+  @GET
+  @Path("interpreter/bind/{noteId}")
+  public Response bind(@PathParam("noteId") String noteId) {
+    List<InterpreterSettingListForNoteBind> settingList
+      = new LinkedList<InterpreterSettingListForNoteBind>();
+
+    List<InterpreterSetting> selectedSettings = notebook.getBindedInterpreterSettings(noteId);
+    for (InterpreterSetting setting : selectedSettings) {
+      settingList.add(new InterpreterSettingListForNoteBind(
+          setting.id(),
+          setting.getName(),
+          setting.getGroup(),
+          setting.getInterpreterGroup(),
+          true)
+      );
+    }
+
+    List<InterpreterSetting> availableSettings = notebook.getInterpreterFactory().get();
+    for (InterpreterSetting setting : availableSettings) {
+      boolean selected = false;
+      for (InterpreterSetting selectedSetting : selectedSettings) {
+        if (selectedSetting.id().equals(setting.id())) {
+          selected = true;
+          break;
+        }
+      }
+
+      if (!selected) {
+        settingList.add(new InterpreterSettingListForNoteBind(
+            setting.id(),
+            setting.getName(),
+            setting.getGroup(),
+            setting.getInterpreterGroup(),
+            false)
+        );
+      }
+    }
+    return new JsonResponse(Status.OK, "", settingList).build();
+  }
+}
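
The bind endpoint above deserializes its request body as a bare JSON array of setting ids
(gson.fromJson(req, new TypeToken<List<String>>(){}.getType())), so a client only needs to serialize
a list of ids. The sketch below produces and parses that payload with the same Gson calls; the
setting ids are placeholders.

import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.List;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class BindRequestSketch {

  public static void main(String[] args) {
    Gson gson = new Gson();

    // Body sent with PUT /notebook/interpreter/bind/{noteId}: a JSON array of setting ids.
    List<String> settingIds = Arrays.asList("setting-1", "setting-2"); // placeholder ids
    String requestBody = gson.toJson(settingIds);
    System.out.println(requestBody); // ["setting-1","setting-2"]

    // Server side: the same payload parsed back, as bind() does.
    Type listType = new TypeToken<List<String>>(){}.getType();
    List<String> parsed = gson.fromJson(requestBody, listType);
    System.out.println(parsed.size() + " settings to bind");
  }
}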

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/rest/ZeppelinRestApi.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/ZeppelinRestApi.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/ZeppelinRestApi.java
new file mode 100644
index 0000000..4fc47a4
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/ZeppelinRestApi.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.rest;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.Response;
+
+import com.wordnik.swagger.annotations.Api;
+
+/**
+ * Zeppelin root rest api endpoint.
+ *
+ * @author anthonycorbacho
+ * @since 0.3.4
+ */
+@Path("/")
+@Api(value = "/", description = "Zeppelin REST API root")
+public class ZeppelinRestApi {
+
+  /**
+   * Required by Swagger.
+   */
+  public ZeppelinRestApi() {
+    super();
+  }
+
+  /**
+   * Get the root endpoint. Always returns 200.
+   *
+   * @return 200 response
+   */
+  @GET
+  public Response getRoot() {
+    return Response.ok().build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/InterpreterSettingListForNoteBind.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/InterpreterSettingListForNoteBind.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/InterpreterSettingListForNoteBind.java
new file mode 100644
index 0000000..b74054c
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/InterpreterSettingListForNoteBind.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.rest.message;
+
+import java.util.List;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+
+/**
+ * InterpreterSetting information for binding
+ */
+public class InterpreterSettingListForNoteBind {
+  String id;
+  String name;
+  String group;
+  private boolean selected;
+  private List<Interpreter> interpreters;
+
+  public InterpreterSettingListForNoteBind(String id, String name,
+      String group, List<Interpreter> interpreters, boolean selected) {
+    super();
+    this.id = id;
+    this.name = name;
+    this.group = group;
+    this.interpreters = interpreters;
+    this.selected = selected;
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getGroup() {
+    return group;
+  }
+
+  public void setGroup(String group) {
+    this.group = group;
+  }
+
+  public List<Interpreter> getInterpreterNames() {
+    return interpreters;
+  }
+
+  public void setInterpreterNames(List<Interpreter> interpreters) {
+    this.interpreters = interpreters;
+  }
+
+  public boolean isSelected() {
+    return selected;
+  }
+
+  public void setSelected(boolean selected) {
+    this.selected = selected;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/NewInterpreterSettingRequest.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/NewInterpreterSettingRequest.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/NewInterpreterSettingRequest.java
new file mode 100644
index 0000000..6489a71
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/NewInterpreterSettingRequest.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.rest.message;
+
+import java.util.Map;
+
+import org.apache.zeppelin.interpreter.InterpreterOption;
+
+/**
+ * NewInterpreterSetting REST API request message.
+ *
+ */
+public class NewInterpreterSettingRequest {
+  String name;
+  String group;
+  InterpreterOption option;
+  Map<String, String> properties;
+
+  public NewInterpreterSettingRequest() {
+
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public String getGroup() {
+    return group;
+  }
+
+  public Map<String, String> getProperties() {
+    return properties;
+  }
+
+  public InterpreterOption getOption() {
+    return option;
+  }
+}
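
newSettings() in InterpreterRestApi parses its body straight into NewInterpreterSettingRequest, so
the expected payload is an object with name, group, option and a string-to-string properties map.
The sketch below uses a local stand-in class (LocalRequest is not a Zeppelin type) and leaves
"option" out because InterpreterOption's shape is not part of this patch.

import java.util.Map;

import com.google.gson.Gson;

public class NewSettingPayloadSketch {

  // Local stand-in mirroring the request fields shown above, minus "option".
  static class LocalRequest {
    String name;
    String group;
    Map<String, String> properties;
  }

  public static void main(String[] args) {
    String body = "{"
        + "\"name\": \"spark\","
        + "\"group\": \"spark\","
        + "\"properties\": {\"spark.app.name\": \"Zeppelin\"}"
        + "}";

    LocalRequest request = new Gson().fromJson(body, LocalRequest.class);
    System.out.println(request.name + " / " + request.group + " / " + request.properties);
  }
}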

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/UpdateInterpreterSettingRequest.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/UpdateInterpreterSettingRequest.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/UpdateInterpreterSettingRequest.java
new file mode 100644
index 0000000..98f4ab7
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/message/UpdateInterpreterSettingRequest.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.rest.message;
+
+import java.util.Properties;
+
+import org.apache.zeppelin.interpreter.InterpreterOption;
+
+/**
+ * UpdateInterpreterSetting REST API request message.
+ */
+public class UpdateInterpreterSettingRequest {
+  InterpreterOption option;
+  Properties properties;
+
+  public UpdateInterpreterSettingRequest(InterpreterOption option,
+      Properties properties) {
+    super();
+    this.option = option;
+    this.properties = properties;
+  }
+  public InterpreterOption getOption() {
+    return option;
+  }
+  public Properties getProperties() {
+    return properties;
+  }
+
+
+}
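
A minimal construction sketch (illustrative, not part of the committed sources); it assumes InterpreterOption offers a no-arg constructor, and the property value is made up.

    import java.util.Properties;

    import org.apache.zeppelin.interpreter.InterpreterOption;
    import org.apache.zeppelin.rest.message.UpdateInterpreterSettingRequest;

    public class UpdateInterpreterSettingRequestSketch {
      public static void main(String[] args) {
        Properties properties = new Properties();
        properties.setProperty("spark.executor.memory", "1g");  // illustrative value

        // Assumes InterpreterOption has a no-arg constructor.
        UpdateInterpreterSettingRequest update =
            new UpdateInterpreterSettingRequest(new InterpreterOption(), properties);

        System.out.println(update.getProperties().getProperty("spark.executor.memory"));
      }
    }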

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/server/AppScriptServlet.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/server/AppScriptServlet.java b/zeppelin-server/src/main/java/org/apache/zeppelin/server/AppScriptServlet.java
new file mode 100644
index 0000000..8c8f9a7
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/server/AppScriptServlet.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.server;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.util.resource.Resource;
+
+/**
+ * Simple servlet that dynamically sets the WebSocket port
+ * in the JavaScript sent to the client.
+ */
+public class AppScriptServlet extends DefaultServlet {
+
+  // Set of script paths that may contain the getPort()
+  // function originally defined in app.js
+  private static Set<String> scriptPaths = new HashSet<String>(
+    Arrays.asList(
+      "/scripts/scripts.js",
+      "/scripts/app.js"
+    )
+  );
+
+  private int websocketPort;
+
+  public AppScriptServlet(int websocketPort) {
+    this.websocketPort = websocketPort;
+  }
+
+  @Override
+  protected void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException,
+          IOException {
+
+    // Process all requests not for the app script to the parent
+    // class
+    String uri = request.getRequestURI();
+    if (!scriptPaths.contains(uri)) {
+      super.doGet(request, response);
+      return;
+    }
+
+    // Read the script file chunk by chunk
+    Resource scriptFile = getResource(uri);
+    InputStream is = scriptFile.getInputStream();
+    StringBuffer script = new StringBuffer();
+    byte[] buffer = new byte[1024];
+    while (is.available() > 0) {
+      int numRead = is.read(buffer);
+      if (numRead <= 0) {
+        break;
+      }
+      script.append(new String(buffer, 0, numRead, "UTF-8"));
+    }
+
+    // Replace the string "function getPort(){...}" to return
+    // the proper value
+    int startIndex = script.indexOf("function getPort()");
+    int endIndex = script.indexOf("}", startIndex);
+
+    if (startIndex >= 0 && endIndex >= 0) {
+      String replaceString = "function getPort(){return " + websocketPort + "}";
+      script.replace(startIndex, endIndex + 1, replaceString);
+    }
+
+    response.getWriter().println(script.toString());
+  }
+}
+
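
A standalone sketch (illustrative, not part of the committed sources) of the getPort() rewrite performed in doGet() above, applied to a made-up script body.

    public class GetPortRewriteSketch {
      public static void main(String[] args) {
        int websocketPort = 8081;  // illustrative port
        StringBuffer script = new StringBuffer(
            "function getPort(){ return Number(location.port) + 1; }\n"
            + "console.log(getPort());");

        // Same replacement as AppScriptServlet.doGet()
        int startIndex = script.indexOf("function getPort()");
        int endIndex = script.indexOf("}", startIndex);
        if (startIndex >= 0 && endIndex >= 0) {
          String replaceString = "function getPort(){return " + websocketPort + "}";
          script.replace(startIndex, endIndex + 1, replaceString);
        }

        // Prints: function getPort(){return 8081} followed by the untouched remainder
        System.out.println(script);
      }
    }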

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/server/CorsFilter.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/server/CorsFilter.java b/zeppelin-server/src/main/java/org/apache/zeppelin/server/CorsFilter.java
new file mode 100644
index 0000000..1524d5b
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/server/CorsFilter.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.server;
+
+import java.io.IOException;
+import java.text.DateFormat;
+import java.util.Date;
+import java.util.Locale;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * CORS filter that adds Access-Control headers to REST API responses.
+ *
+ */
+public class CorsFilter implements Filter {
+
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
+      throws IOException, ServletException {
+    if (((HttpServletRequest) request).getMethod().equals("OPTIONS")) {
+      HttpServletResponse resp = ((HttpServletResponse) response);
+      addCorsHeaders(resp);
+      return;
+    }
+
+    if (response instanceof HttpServletResponse) {
+      HttpServletResponse alteredResponse = ((HttpServletResponse) response);
+      addCorsHeaders(alteredResponse);
+    }
+    filterChain.doFilter(request, response);
+  }
+
+  private void addCorsHeaders(HttpServletResponse response) {
+    response.addHeader("Access-Control-Allow-Origin", "*");
+    response.addHeader("Access-Control-Allow-Credentials", "true");
+    response.addHeader("Access-Control-Allow-Headers", "authorization,Content-Type");
+    response.addHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, HEAD, DELETE");
+    DateFormat fullDateFormatEN =
+        DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL, new Locale("EN", "en"));
+    response.addHeader("Date", fullDateFormatEN.format(new Date()));
+  }
+
+  @Override
+  public void destroy() {}
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {}
+}
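
A minimal sketch (illustrative, not part of the committed sources) of exercising the filter, assuming Mockito is available on the test classpath: a preflight OPTIONS request should receive the CORS headers and never reach the filter chain.

    import static org.mockito.Mockito.*;

    import javax.servlet.FilterChain;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.apache.zeppelin.server.CorsFilter;

    public class CorsFilterSketch {
      public static void main(String[] args) throws Exception {
        HttpServletRequest request = mock(HttpServletRequest.class);
        HttpServletResponse response = mock(HttpServletResponse.class);
        FilterChain chain = mock(FilterChain.class);

        // Preflight OPTIONS: headers are added and the chain is never invoked.
        when(request.getMethod()).thenReturn("OPTIONS");
        new CorsFilter().doFilter(request, response, chain);

        verify(response).addHeader("Access-Control-Allow-Origin", "*");
        verify(chain, never()).doFilter(request, response);
      }
    }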

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/server/JsonResponse.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/server/JsonResponse.java b/zeppelin-server/src/main/java/org/apache/zeppelin/server/JsonResponse.java
new file mode 100644
index 0000000..28a3bb8
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/server/JsonResponse.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.server;
+
+import java.util.ArrayList;
+
+import javax.ws.rs.core.NewCookie;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterSerializer;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+/**
+ * Json response builder.
+ *
+ * @author Leemoonsoo
+ *
+ * @param <T>
+ */
+public class JsonResponse<T> {
+  private javax.ws.rs.core.Response.Status status;
+  private String message;
+  private T body;
+  transient ArrayList<NewCookie> cookies;
+  transient boolean pretty = false;
+
+  public JsonResponse(javax.ws.rs.core.Response.Status status) {
+    this.status = status;
+    this.message = null;
+    this.body = null;
+
+  }
+
+  public JsonResponse(javax.ws.rs.core.Response.Status status, String message) {
+    this.status = status;
+    this.message = message;
+    this.body = null;
+  }
+
+  public JsonResponse(javax.ws.rs.core.Response.Status status, T body) {
+    this.status = status;
+    this.message = null;
+    this.body = body;
+  }
+
+  public JsonResponse(javax.ws.rs.core.Response.Status status, String message, T body) {
+    this.status = status;
+    this.message = message;
+    this.body = body;
+  }
+
+  public JsonResponse<T> setPretty(boolean pretty) {
+    this.pretty = pretty;
+    return this;
+  }
+
+  /**
+   * Add a cookie to be set on the built response.
+   *
+   * @param newCookie the cookie to add
+   * @return this builder
+   */
+  public JsonResponse<T> addCookie(NewCookie newCookie) {
+    if (cookies == null) {
+      cookies = new ArrayList<NewCookie>();
+    }
+    cookies.add(newCookie);
+
+    return this;
+  }
+
+  /**
+   * Add a cookie, given by name and value, to be set on the built response.
+   *
+   * @param name cookie name
+   * @param value cookie value
+   * @return this builder
+   */
+  public JsonResponse<?> addCookie(String name, String value) {
+    return addCookie(new NewCookie(name, value));
+  }
+
+  @Override
+  public String toString() {
+    GsonBuilder gsonBuilder = new GsonBuilder()
+      .registerTypeAdapter(Interpreter.class, new InterpreterSerializer());
+    if (pretty) {
+      gsonBuilder.setPrettyPrinting();
+    }
+    Gson gson = gsonBuilder.create();
+    return gson.toJson(this);
+  }
+
+  public javax.ws.rs.core.Response.Status getCode() {
+    return status;
+  }
+
+  public void setCode(javax.ws.rs.core.Response.Status status) {
+    this.status = status;
+  }
+
+  public String getMessage() {
+    return message;
+  }
+
+  public void setMessage(String message) {
+    this.message = message;
+  }
+
+  public T getBody() {
+    return body;
+  }
+
+  public void setBody(T body) {
+    this.body = body;
+  }
+
+  public javax.ws.rs.core.Response build() {
+    ResponseBuilder r = javax.ws.rs.core.Response.status(status).entity(this.toString());
+    if (cookies != null) {
+      for (NewCookie nc : cookies) {
+        r.cookie(nc);
+      }
+    }
+    return r.build();
+  }
+}
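
A minimal usage sketch (illustrative, not part of the committed sources) of how a REST method could build its JAX-RS response through this class; message and body values are made up.

    import javax.ws.rs.core.Response;
    import javax.ws.rs.core.Response.Status;

    import org.apache.zeppelin.server.JsonResponse;

    public class JsonResponseSketch {
      public static void main(String[] args) {
        // Serializes {status, message, body} with Gson and wraps it in a JAX-RS Response.
        Response response =
            new JsonResponse<String>(Status.OK, "interpreter setting created", "spark")
                .setPretty(true)
                .build();

        System.out.println(response.getStatus());  // 200
        System.out.println(response.getEntity());  // the pretty-printed JSON string
      }
    }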

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java b/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
new file mode 100644
index 0000000..1c9aa03
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
@@ -0,0 +1,327 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.server;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.net.ssl.SSLContext;
+import javax.servlet.DispatcherType;
+import javax.ws.rs.core.Application;
+
+import org.apache.cxf.jaxrs.servlet.CXFNonSpringJaxrsServlet;
+import org.apache.zeppelin.conf.ZeppelinConfiguration;
+import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
+import org.apache.zeppelin.interpreter.InterpreterFactory;
+import org.apache.zeppelin.notebook.Notebook;
+import org.apache.zeppelin.rest.InterpreterRestApi;
+import org.apache.zeppelin.rest.NotebookRestApi;
+import org.apache.zeppelin.rest.ZeppelinRestApi;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.apache.zeppelin.socket.NotebookServer;
+import org.apache.zeppelin.socket.SslWebSocketServerFactory;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.bio.SocketConnector;
+import org.eclipse.jetty.server.handler.ContextHandlerCollection;
+import org.eclipse.jetty.server.session.SessionHandler;
+import org.eclipse.jetty.server.ssl.SslSocketConnector;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.wordnik.swagger.jersey.config.JerseyJaxrsConfig;
+
+/**
+ * Main class of Zeppelin.
+ *
+ * @author Leemoonsoo
+ *
+ */
+
+public class ZeppelinServer extends Application {
+  private static final Logger LOG = LoggerFactory.getLogger(ZeppelinServer.class);
+
+  private SchedulerFactory schedulerFactory;
+  public static Notebook notebook;
+
+  static NotebookServer notebookServer;
+
+  private InterpreterFactory replFactory;
+
+  public static void main(String[] args) throws Exception {
+    ZeppelinConfiguration conf = ZeppelinConfiguration.create();
+    conf.setProperty("args", args);
+
+    final Server jettyServer = setupJettyServer(conf);
+    notebookServer = setupNotebookServer(conf);
+
+    // REST api
+    final ServletContextHandler restApi = setupRestApiContextHandler();
+    /** NOTE: Swagger-core is included via the web.xml in zeppelin-web,
+     * but the rest of Swagger is configured here.
+     */
+    final ServletContextHandler swagger = setupSwaggerContextHandler(conf);
+
+    // Web UI
+    final WebAppContext webApp = setupWebAppContext(conf);
+    // Commented out since the zeppelin-docs module is removed.
+    //final WebAppContext webAppSwagg = setupWebAppSwagger(conf);
+
+    // add all handlers
+    ContextHandlerCollection contexts = new ContextHandlerCollection();
+    //contexts.setHandlers(new Handler[]{swagger, restApi, webApp, webAppSwagg});
+    contexts.setHandlers(new Handler[]{swagger, restApi, webApp});
+    jettyServer.setHandler(contexts);
+
+    notebookServer.start();
+    LOG.info("Start zeppelin server");
+    jettyServer.start();
+    LOG.info("Started");
+
+    Runtime.getRuntime().addShutdownHook(new Thread(){
+      @Override public void run() {
+        LOG.info("Shutting down Zeppelin Server ... ");
+        try {
+          notebook.getInterpreterFactory().close();
+
+          jettyServer.stop();
+          notebookServer.stop();
+        } catch (Exception e) {
+          LOG.error("Error while stopping servlet container", e);
+        }
+        LOG.info("Bye");
+      }
+    });
+
+
+    // When Zeppelin is started inside an IDE (especially Eclipse),
+    // press any key in the console window for a graceful shutdown.
+    if (System.getenv("ZEPPELIN_IDENT_STRING") == null) {
+      try {
+        System.in.read();
+      } catch (IOException e) {
+      }
+      System.exit(0);
+    }
+
+    jettyServer.join();
+  }
+
+  private static Server setupJettyServer(ZeppelinConfiguration conf)
+      throws Exception {
+
+    SocketConnector connector;
+    if (conf.useSsl()) {
+      connector = new SslSocketConnector(getSslContextFactory(conf));
+    }
+    else {
+      connector = new SocketConnector();
+    }
+
+    // Set some timeout options to make debugging easier.
+    int timeout = 1000 * 30;
+    connector.setMaxIdleTime(timeout);
+    connector.setSoLingerTime(-1);
+    connector.setPort(conf.getServerPort());
+
+    final Server server = new Server();
+    server.addConnector(connector);
+
+    return server;
+  }
+
+  private static NotebookServer setupNotebookServer(ZeppelinConfiguration conf)
+      throws Exception {
+
+    NotebookServer server = new NotebookServer(conf.getWebSocketPort());
+
+    // The default WebSocketServer uses an unencrypted connector, so the
+    // connector only needs to be changed when SSL is enabled.
+    if (conf.useSsl()) {
+      SslWebSocketServerFactory wsf = new SslWebSocketServerFactory(getSslContext(conf));
+      wsf.setNeedClientAuth(conf.useClientAuth());
+      server.setWebSocketFactory(wsf);
+    }
+
+    return server;
+  }
+
+  private static SslContextFactory getSslContextFactory(ZeppelinConfiguration conf)
+      throws Exception {
+
+    // Note that the API for the SslContextFactory is different for
+    // Jetty version 9
+    SslContextFactory sslContextFactory = new SslContextFactory();
+
+    // Set keystore
+    sslContextFactory.setKeyStore(conf.getKeyStorePath());
+    sslContextFactory.setKeyStoreType(conf.getKeyStoreType());
+    sslContextFactory.setKeyStorePassword(conf.getKeyStorePassword());
+    sslContextFactory.setKeyManagerPassword(conf.getKeyManagerPassword());
+
+    // Set truststore
+    sslContextFactory.setTrustStore(conf.getTrustStorePath());
+    sslContextFactory.setTrustStoreType(conf.getTrustStoreType());
+    sslContextFactory.setTrustStorePassword(conf.getTrustStorePassword());
+
+    sslContextFactory.setNeedClientAuth(conf.useClientAuth());
+
+    return sslContextFactory;
+  }
+
+  private static SSLContext getSslContext(ZeppelinConfiguration conf)
+      throws Exception {
+
+    SslContextFactory scf = getSslContextFactory(conf);
+    if (!scf.isStarted()) {
+      scf.start();
+    }
+    return scf.getSslContext();
+  }
+
+  private static ServletContextHandler setupRestApiContextHandler() {
+    final ServletHolder cxfServletHolder = new ServletHolder(new CXFNonSpringJaxrsServlet());
+    cxfServletHolder.setInitParameter("javax.ws.rs.Application", ZeppelinServer.class.getName());
+    cxfServletHolder.setName("rest");
+    cxfServletHolder.setForcedPath("rest");
+
+    final ServletContextHandler cxfContext = new ServletContextHandler();
+    cxfContext.setSessionHandler(new SessionHandler());
+    cxfContext.setContextPath("/api");
+    cxfContext.addServlet(cxfServletHolder, "/*");
+
+    cxfContext.addFilter(new FilterHolder(CorsFilter.class), "/*",
+        EnumSet.allOf(DispatcherType.class));
+    return cxfContext;
+  }
+
+  /**
+   * Swagger core handler - needed for the RESTful API documentation.
+   *
+   * @return ServletContextHandler of Swagger
+   */
+  private static ServletContextHandler setupSwaggerContextHandler(
+    ZeppelinConfiguration conf) {
+
+    // Configure Swagger-core
+    final ServletHolder swaggerServlet =
+        new ServletHolder(new JerseyJaxrsConfig());
+    swaggerServlet.setName("JerseyJaxrsConfig");
+    swaggerServlet.setInitParameter("api.version", "1.0.0");
+    swaggerServlet.setInitParameter(
+        "swagger.api.basepath",
+        "http://localhost:" + conf.getServerPort() + "/api");
+    swaggerServlet.setInitOrder(2);
+
+    // Setup the handler
+    final ServletContextHandler handler = new ServletContextHandler();
+    handler.setSessionHandler(new SessionHandler());
+    // Bind Swagger-core to the url HOST/api-docs
+    handler.addServlet(swaggerServlet, "/api-docs/*");
+
+    // And we are done
+    return handler;
+  }
+
+  private static WebAppContext setupWebAppContext(
+      ZeppelinConfiguration conf) {
+
+    WebAppContext webApp = new WebAppContext();
+    File warPath = new File(conf.getString(ConfVars.ZEPPELIN_WAR));
+    if (warPath.isDirectory()) {
+      // Development mode, read from FS
+      // webApp.setDescriptor(warPath+"/WEB-INF/web.xml");
+      webApp.setResourceBase(warPath.getPath());
+      webApp.setContextPath("/");
+      webApp.setParentLoaderPriority(true);
+    } else {
+      // use packaged WAR
+      webApp.setWar(warPath.getAbsolutePath());
+    }
+    // Explicit bind to root
+    webApp.addServlet(
+      new ServletHolder(new AppScriptServlet(conf.getWebSocketPort())),
+      "/*"
+    );
+    return webApp;
+  }
+
+  /**
+   * Handles the WebApplication for Swagger-ui.
+   *
+   * @return WebAppContext with swagger ui context
+   */
+  /*private static WebAppContext setupWebAppSwagger(
+      ZeppelinConfiguration conf) {
+
+    WebAppContext webApp = new WebAppContext();
+    File warPath = new File(conf.getString(ConfVars.ZEPPELIN_API_WAR));
+
+    if (warPath.isDirectory()) {
+      webApp.setResourceBase(warPath.getPath());
+    } else {
+      webApp.setWar(warPath.getAbsolutePath());
+    }
+    webApp.setContextPath("/docs");
+    webApp.setParentLoaderPriority(true);
+    // Bind swagger-ui to the path HOST/docs
+    webApp.addServlet(new ServletHolder(new DefaultServlet()), "/docs/*");
+    return webApp;
+  }*/
+
+  public ZeppelinServer() throws Exception {
+    ZeppelinConfiguration conf = ZeppelinConfiguration.create();
+
+    this.schedulerFactory = new SchedulerFactory();
+
+    this.replFactory = new InterpreterFactory(conf);
+    notebook = new Notebook(conf, schedulerFactory, replFactory, notebookServer);
+  }
+
+  @Override
+  public Set<Class<?>> getClasses() {
+    Set<Class<?>> classes = new HashSet<Class<?>>();
+    return classes;
+  }
+
+  @Override
+  public java.util.Set<java.lang.Object> getSingletons() {
+    Set<Object> singletons = new HashSet<Object>();
+
+    /** Rest-api root endpoint */
+    ZeppelinRestApi root = new ZeppelinRestApi();
+    singletons.add(root);
+
+    NotebookRestApi notebookApi = new NotebookRestApi(notebook);
+    singletons.add(notebookApi);
+
+    InterpreterRestApi interpreterApi = new InterpreterRestApi(replFactory);
+    singletons.add(interpreterApi);
+
+    return singletons;
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/socket/Message.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/socket/Message.java b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/Message.java
new file mode 100644
index 0000000..a7b8b66
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/Message.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.socket;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Zeppelin websocket message template class.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class Message {
+  /**
+   * Representation of event type.
+   *
+   * @author Leemoonsoo
+   *
+   */
+  public static enum OP {
+    GET_NOTE, // [c-s] client load note
+              // @param id note id
+
+    NOTE, // [s-c] note info
+          // @param note serialized Note object
+
+    PARAGRAPH, // [s-c] paragraph info
+               // @param paragraph serialized paragraph object
+
+    PROGRESS, // [s-c] progress update
+              // @param id paragraph id
+              // @param progress percentage progress
+
+    NEW_NOTE, // [c-s] create new notebook
+    DEL_NOTE, // [c-s] delete notebook
+              // @param id note id
+    NOTE_UPDATE,
+
+    RUN_PARAGRAPH, // [c-s] run paragraph
+                   // @param id paragraph id
+                   // @param paragraph paragraph content, i.e. the script
+                   // @param config paragraph config
+                   // @param params paragraph params
+
+    COMMIT_PARAGRAPH, // [c-s] commit paragraph
+                      // @param id paragraph id
+                      // @param title paragraph title
+                      // @param paragraph paragraph content, i.e. the script
+                      // @param config paragraph config
+                      // @param params paragraph params
+
+    CANCEL_PARAGRAPH, // [c-s] cancel paragraph run
+                      // @param id paragraph id
+
+    MOVE_PARAGRAPH, // [c-s] move paragraph order
+                    // @param id paragraph id
+                    // @param index new index the paragraph should move to
+
+    INSERT_PARAGRAPH, // [c-s] create new paragraph below current paragraph
+                      // @param target index
+
+    COMPLETION, // [c-s] ask completion candidates
+                // @param id
+                // @param buf current code
+                // @param cursor cursor position in code
+
+    COMPLETION_LIST, // [s-c] send back completion candidates list
+                     // @param id
+                     // @param completions list of string
+
+    LIST_NOTES, // [c-s] ask for the list of notes
+
+    NOTES_INFO, // [s-c] list of note infos
+                // @param notes serialized List<NoteInfo> object
+
+    PARAGRAPH_REMOVE,
+  }
+
+  public OP op;
+  public Map<String, Object> data = new HashMap<String, Object>();
+
+  public Message(OP op) {
+    this.op = op;
+  }
+
+  public Message put(String k, Object v) {
+    data.put(k, v);
+    return this;
+  }
+
+  public Object get(String k) {
+    return data.get(k);
+  }
+}
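
A minimal sketch (illustrative, not part of the committed sources) of building and serializing a message the same way serializeMessage() in NotebookServer below does; the paragraph id is made up.

    import org.apache.zeppelin.socket.Message;
    import org.apache.zeppelin.socket.Message.OP;

    import com.google.gson.Gson;

    public class MessageSketch {
      public static void main(String[] args) {
        // Ask the server to run a paragraph; keys follow the @param notes above.
        Message m = new Message(OP.RUN_PARAGRAPH)
            .put("id", "20150406-000000_123")      // illustrative paragraph id
            .put("paragraph", "println(1 + 1)")
            .put("title", "example");

        // e.g. {"op":"RUN_PARAGRAPH","data":{"id":"...","paragraph":"println(1 + 1)","title":"example"}}
        System.out.println(new Gson().toJson(m));
      }
    }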

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/socket/NotebookServer.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/socket/NotebookServer.java b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/NotebookServer.java
new file mode 100644
index 0000000..db5733e
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/NotebookServer.java
@@ -0,0 +1,500 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.socket;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.notebook.JobListenerFactory;
+import org.apache.zeppelin.notebook.Note;
+import org.apache.zeppelin.notebook.Notebook;
+import org.apache.zeppelin.notebook.Paragraph;
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.JobListener;
+import org.apache.zeppelin.scheduler.Job.Status;
+import org.apache.zeppelin.server.ZeppelinServer;
+import org.apache.zeppelin.socket.Message.OP;
+import org.java_websocket.WebSocket;
+import org.java_websocket.handshake.ClientHandshake;
+import org.java_websocket.server.WebSocketServer;
+import org.quartz.SchedulerException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Strings;
+import com.google.gson.Gson;
+
+/**
+ * Zeppelin websocket service.
+ *
+ * @author anthonycorbacho
+ */
+public class NotebookServer extends WebSocketServer implements JobListenerFactory {
+
+  private static final Logger LOG = LoggerFactory.getLogger(NotebookServer.class);
+  private static final int DEFAULT_PORT = 8282;
+
+  private static void creatingwebSocketServerLog(int port) {
+    LOG.info("Create zeppelin websocket on port {}", port);
+  }
+
+  Gson gson = new Gson();
+  Map<String, List<WebSocket>> noteSocketMap = new HashMap<String, List<WebSocket>>();
+  List<WebSocket> connectedSockets = new LinkedList<WebSocket>();
+
+  public NotebookServer() {
+    super(new InetSocketAddress(DEFAULT_PORT));
+    creatingwebSocketServerLog(DEFAULT_PORT);
+  }
+
+  public NotebookServer(int port) {
+    super(new InetSocketAddress(port));
+    creatingwebSocketServerLog(port);
+  }
+
+  private Notebook notebook() {
+    return ZeppelinServer.notebook;
+  }
+
+  @Override
+  public void onOpen(WebSocket conn, ClientHandshake handshake) {
+    LOG.info("New connection from {} : {}", conn.getRemoteSocketAddress().getHostName(), conn
+        .getRemoteSocketAddress().getPort());
+    synchronized (connectedSockets) {
+      connectedSockets.add(conn);
+    }
+  }
+
+  @Override
+  public void onMessage(WebSocket conn, String msg) {
+    Notebook notebook = notebook();
+    try {
+      Message messagereceived = deserializeMessage(msg);
+      LOG.info("RECEIVE << " + messagereceived.op);
+      /** Let's be elegant here */
+      switch (messagereceived.op) {
+          case LIST_NOTES:
+            broadcastNoteList();
+            break;
+          case GET_NOTE:
+            sendNote(conn, notebook, messagereceived);
+            break;
+          case NEW_NOTE:
+            createNote(conn, notebook);
+            break;
+          case DEL_NOTE:
+            removeNote(conn, notebook, messagereceived);
+            break;
+          case COMMIT_PARAGRAPH:
+            updateParagraph(conn, notebook, messagereceived);
+            break;
+          case RUN_PARAGRAPH:
+            runParagraph(conn, notebook, messagereceived);
+            break;
+          case CANCEL_PARAGRAPH:
+            cancelParagraph(conn, notebook, messagereceived);
+            break;
+          case MOVE_PARAGRAPH:
+            moveParagraph(conn, notebook, messagereceived);
+            break;
+          case INSERT_PARAGRAPH:
+            insertParagraph(conn, notebook, messagereceived);
+            break;
+          case PARAGRAPH_REMOVE:
+            removeParagraph(conn, notebook, messagereceived);
+            break;
+          case NOTE_UPDATE:
+            updateNote(conn, notebook, messagereceived);
+            break;
+          case COMPLETION:
+            completion(conn, notebook, messagereceived);
+            break;
+          default:
+            broadcastNoteList();
+            break;
+      }
+    } catch (Exception e) {
+      LOG.error("Can't handle message", e);
+    }
+  }
+
+  @Override
+  public void onClose(WebSocket conn, int code, String reason, boolean remote) {
+    LOG.info("Closed connection to {} : {}", conn.getRemoteSocketAddress().getHostName(), conn
+        .getRemoteSocketAddress().getPort());
+    removeConnectionFromAllNote(conn);
+    synchronized (connectedSockets) {
+      connectedSockets.remove(conn);
+    }
+  }
+
+  @Override
+  public void onError(WebSocket conn, Exception message) {
+    removeConnectionFromAllNote(conn);
+    synchronized (connectedSockets) {
+      connectedSockets.remove(conn);
+    }
+  }
+
+  private Message deserializeMessage(String msg) {
+    Message m = gson.fromJson(msg, Message.class);
+    return m;
+  }
+
+  private String serializeMessage(Message m) {
+    return gson.toJson(m);
+  }
+
+  private void addConnectionToNote(String noteId, WebSocket socket) {
+    synchronized (noteSocketMap) {
+      removeConnectionFromAllNote(socket); // make sure a socket relates only a single note.
+      List<WebSocket> socketList = noteSocketMap.get(noteId);
+      if (socketList == null) {
+        socketList = new LinkedList<WebSocket>();
+        noteSocketMap.put(noteId, socketList);
+      }
+
+      if (!socketList.contains(socket)) {
+        socketList.add(socket);
+      }
+    }
+  }
+
+  private void removeConnectionFromNote(String noteId, WebSocket socket) {
+    synchronized (noteSocketMap) {
+      List<WebSocket> socketList = noteSocketMap.get(noteId);
+      if (socketList != null) {
+        socketList.remove(socket);
+      }
+    }
+  }
+
+  private void removeNote(String noteId) {
+    synchronized (noteSocketMap) {
+      noteSocketMap.remove(noteId); // drop all sockets attached to the note
+    }
+  }
+
+  private void removeConnectionFromAllNote(WebSocket socket) {
+    synchronized (noteSocketMap) {
+      Set<String> keys = noteSocketMap.keySet();
+      for (String noteId : keys) {
+        removeConnectionFromNote(noteId, socket);
+      }
+    }
+  }
+
+  private String getOpenNoteId(WebSocket socket) {
+    String id = null;
+    synchronized (noteSocketMap) {
+      Set<String> keys = noteSocketMap.keySet();
+      for (String noteId : keys) {
+        List<WebSocket> sockets = noteSocketMap.get(noteId);
+        if (sockets.contains(socket)) {
+          id = noteId;
+        }
+      }
+    }
+    return id;
+  }
+
+  private void broadcast(String noteId, Message m) {
+    LOG.info("SEND >> " + m.op);
+    synchronized (noteSocketMap) {
+      List<WebSocket> socketLists = noteSocketMap.get(noteId);
+      if (socketLists == null || socketLists.size() == 0) {
+        return;
+      }
+      for (WebSocket conn : socketLists) {
+        conn.send(serializeMessage(m));
+      }
+    }
+  }
+
+  private void broadcastAll(Message m) {
+    synchronized (connectedSockets) {
+      for (WebSocket conn : connectedSockets) {
+        conn.send(serializeMessage(m));
+      }
+    }
+  }
+
+  private void broadcastNote(Note note) {
+    broadcast(note.id(), new Message(OP.NOTE).put("note", note));
+  }
+
+  private void broadcastNoteList() {
+    Notebook notebook = notebook();
+    List<Note> notes = notebook.getAllNotes();
+    List<Map<String, String>> notesInfo = new LinkedList<Map<String, String>>();
+    for (Note note : notes) {
+      Map<String, String> info = new HashMap<String, String>();
+      info.put("id", note.id());
+      info.put("name", note.getName());
+      notesInfo.add(info);
+    }
+    broadcastAll(new Message(OP.NOTES_INFO).put("notes", notesInfo));
+  }
+
+  private void sendNote(WebSocket conn, Notebook notebook, Message fromMessage) {
+    String noteId = (String) fromMessage.get("id");
+    if (noteId == null) {
+      return;
+    }
+    Note note = notebook.getNote(noteId);
+    if (note != null) {
+      addConnectionToNote(note.id(), conn);
+      conn.send(serializeMessage(new Message(OP.NOTE).put("note", note)));
+    }
+  }
+
+  private void updateNote(WebSocket conn, Notebook notebook, Message fromMessage)
+      throws SchedulerException, IOException {
+    String noteId = (String) fromMessage.get("id");
+    String name = (String) fromMessage.get("name");
+    Map<String, Object> config = (Map<String, Object>) fromMessage.get("config");
+    if (noteId == null) {
+      return;
+    }
+    if (config == null) {
+      return;
+    }
+    Note note = notebook.getNote(noteId);
+    if (note != null) {
+      boolean cronUpdated = isCronUpdated(config, note.getConfig());
+      note.setName(name);
+      note.setConfig(config);
+
+      if (cronUpdated) {
+        notebook.refreshCron(note.id());
+      }
+      note.persist();
+
+      broadcastNote(note);
+      broadcastNoteList();
+    }
+  }
+
+  private boolean isCronUpdated(Map<String, Object> configA, Map<String, Object> configB) {
+    Object cronA = configA.get("cron");
+    Object cronB = configB.get("cron");
+    boolean cronUpdated = false;
+    if (cronA != null && cronB != null) {
+      cronUpdated = !cronA.equals(cronB); // only a changed expression is an update
+    } else if (cronA != null || cronB != null) {
+      cronUpdated = true; // cron was added or removed
+    }
+    return cronUpdated;
+  }
+
+  private void createNote(WebSocket conn, Notebook notebook) throws IOException {
+    Note note = notebook.createNote();
+    note.addParagraph(); // it's an empty note. so add one paragraph
+    note.persist();
+    broadcastNote(note);
+    broadcastNoteList();
+  }
+
+  private void removeNote(WebSocket conn, Notebook notebook, Message fromMessage)
+      throws IOException {
+    String noteId = (String) fromMessage.get("id");
+    if (noteId == null) {
+      return;
+    }
+    Note note = notebook.getNote(noteId);
+    note.unpersist();
+    notebook.removeNote(noteId);
+    removeNote(noteId);
+    broadcastNoteList();
+  }
+
+  private void updateParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
+      throws IOException {
+    String paragraphId = (String) fromMessage.get("id");
+    if (paragraphId == null) {
+      return;
+    }
+    Map<String, Object> params = (Map<String, Object>) fromMessage.get("params");
+    Map<String, Object> config = (Map<String, Object>) fromMessage.get("config");
+    final Note note = notebook.getNote(getOpenNoteId(conn));
+    Paragraph p = note.getParagraph(paragraphId);
+    p.settings.setParams(params);
+    p.setConfig(config);
+    p.setTitle((String) fromMessage.get("title"));
+    p.setText((String) fromMessage.get("paragraph"));
+    note.persist();
+    broadcast(note.id(), new Message(OP.PARAGRAPH).put("paragraph", p));
+  }
+
+  private void removeParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
+      throws IOException {
+    final String paragraphId = (String) fromMessage.get("id");
+    if (paragraphId == null) {
+      return;
+    }
+    final Note note = notebook.getNote(getOpenNoteId(conn));
+    /** We don't want to remove the last paragraph */
+    if (!note.isLastParagraph(paragraphId)) {
+      note.removeParagraph(paragraphId);
+      note.persist();
+      broadcastNote(note);
+    }
+  }
+
+  private void completion(WebSocket conn, Notebook notebook, Message fromMessage) {
+    String paragraphId = (String) fromMessage.get("id");
+    String buffer = (String) fromMessage.get("buf");
+    int cursor = (int) Double.parseDouble(fromMessage.get("cursor").toString());
+    Message resp = new Message(OP.COMPLETION_LIST).put("id", paragraphId);
+
+    if (paragraphId == null) {
+      conn.send(serializeMessage(resp));
+      return;
+    }
+
+    final Note note = notebook.getNote(getOpenNoteId(conn));
+    List<String> candidates = note.completion(paragraphId, buffer, cursor);
+    resp.put("completions", candidates);
+    conn.send(serializeMessage(resp));
+  }
+
+  private void moveParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
+      throws IOException {
+    final String paragraphId = (String) fromMessage.get("id");
+    if (paragraphId == null) {
+      return;
+    }
+
+    final int newIndex = (int) Double.parseDouble(fromMessage.get("index").toString());
+    final Note note = notebook.getNote(getOpenNoteId(conn));
+    note.moveParagraph(paragraphId, newIndex);
+    note.persist();
+    broadcastNote(note);
+  }
+
+  private void insertParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
+      throws IOException {
+    final int index = (int) Double.parseDouble(fromMessage.get("index").toString());
+
+    final Note note = notebook.getNote(getOpenNoteId(conn));
+    note.insertParagraph(index);
+    note.persist();
+    broadcastNote(note);
+  }
+
+
+  private void cancelParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
+      throws IOException {
+    final String paragraphId = (String) fromMessage.get("id");
+    if (paragraphId == null) {
+      return;
+    }
+
+    final Note note = notebook.getNote(getOpenNoteId(conn));
+    Paragraph p = note.getParagraph(paragraphId);
+    p.abort();
+  }
+
+  private void runParagraph(WebSocket conn, Notebook notebook, Message fromMessage)
+      throws IOException {
+    final String paragraphId = (String) fromMessage.get("id");
+    if (paragraphId == null) {
+      return;
+    }
+    final Note note = notebook.getNote(getOpenNoteId(conn));
+    Paragraph p = note.getParagraph(paragraphId);
+    String text = (String) fromMessage.get("paragraph");
+    p.setText(text);
+    p.setTitle((String) fromMessage.get("title"));
+    Map<String, Object> params = (Map<String, Object>) fromMessage.get("params");
+    p.settings.setParams(params);
+    Map<String, Object> config = (Map<String, Object>) fromMessage.get("config");
+    p.setConfig(config);
+
+    // if it's the last paragraph, let's add a new one
+    boolean isTheLastParagraph = note.getLastParagraph().getId().equals(p.getId());
+    if (!Strings.isNullOrEmpty(text) && isTheLastParagraph) {
+      note.addParagraph();
+    }
+    note.persist();
+    broadcastNote(note);
+
+    try {
+      note.run(paragraphId);
+    }
+    catch (Exception ex) {
+      LOG.error("Exception from run", ex);
+      if (p != null) {
+        p.setReturn(new InterpreterResult(
+          InterpreterResult.Code.ERROR, ex.getMessage()), ex);
+        p.setStatus(Status.ERROR);
+      }
+    }
+  }
+
+  /**
+   * Job listener that broadcasts paragraph progress and status changes
+   * to the websocket clients of the note.
+   */
+  public static class ParagraphJobListener implements JobListener {
+    private NotebookServer notebookServer;
+    private Note note;
+
+    public ParagraphJobListener(NotebookServer notebookServer, Note note) {
+      this.notebookServer = notebookServer;
+      this.note = note;
+    }
+
+    @Override
+    public void onProgressUpdate(Job job, int progress) {
+      notebookServer.broadcast(note.id(),
+          new Message(OP.PROGRESS).put("id", job.getId()).put("progress", job.progress()));
+    }
+
+    @Override
+    public void beforeStatusChange(Job job, Status before, Status after) {}
+
+    @Override
+    public void afterStatusChange(Job job, Status before, Status after) {
+      if (after == Status.ERROR) {
+        job.getException().printStackTrace();
+      }
+      if (job.isTerminated()) {
+        LOG.info("Job {} is finished", job.getId());
+        try {
+          note.persist();
+        } catch (IOException e) {
+          e.printStackTrace();
+        }
+      }
+      notebookServer.broadcastNote(note);
+    }
+  }
+
+  @Override
+  public JobListener getParagraphJobListener(Note note) {
+    return new ParagraphJobListener(this, note);
+  }
+}
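
A hedged client sketch (illustrative, not part of the committed sources): clients exchange Message objects as JSON over a plain websocket connection, here against a locally running server on the default port 8282 and using the java_websocket client API this module already depends on; the exact client constructor is an assumption.

    import java.net.URI;

    import org.apache.zeppelin.socket.Message;
    import org.apache.zeppelin.socket.Message.OP;
    import org.java_websocket.client.WebSocketClient;
    import org.java_websocket.handshake.ServerHandshake;

    import com.google.gson.Gson;

    public class NotebookClientSketch {
      public static void main(String[] args) throws Exception {
        final Gson gson = new Gson();
        WebSocketClient client = new WebSocketClient(new URI("ws://localhost:8282/")) {
          @Override public void onOpen(ServerHandshake handshake) {
            // Ask for the note list; the server answers with a NOTES_INFO message.
            send(gson.toJson(new Message(OP.LIST_NOTES)));
          }
          @Override public void onMessage(String msg) {
            Message m = gson.fromJson(msg, Message.class);
            System.out.println("<< " + m.op + " " + m.data);
          }
          @Override public void onClose(int code, String reason, boolean remote) {}
          @Override public void onError(Exception e) { e.printStackTrace(); }
        };
        client.connectBlocking();
      }
    }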

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/org/apache/zeppelin/socket/SslWebSocketServerFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/socket/SslWebSocketServerFactory.java b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/SslWebSocketServerFactory.java
new file mode 100644
index 0000000..f44dc1f
--- /dev/null
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/SslWebSocketServerFactory.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.socket;
+
+import java.io.IOException;
+import java.nio.channels.ByteChannel;
+import java.nio.channels.SelectionKey;
+import java.nio.channels.SocketChannel;
+import java.util.concurrent.ExecutorService;
+
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLEngine;
+
+import org.java_websocket.SSLSocketChannel2;
+import org.java_websocket.server.DefaultSSLWebSocketServerFactory;
+
+/**
+ * Extension of the java_websocket library's DefaultSSLWebSocketServerFactory
+ * that can require client-side authentication during the SSL handshake.
+ */
+public class SslWebSocketServerFactory
+    extends DefaultSSLWebSocketServerFactory {
+
+  protected boolean needClientAuth;
+
+  public SslWebSocketServerFactory(SSLContext sslcontext) {
+    super(sslcontext);
+    initAttributes();
+  }
+
+  public SslWebSocketServerFactory(
+      SSLContext sslcontext,
+      ExecutorService exec) {
+
+    super(sslcontext, exec);
+    initAttributes();
+  }
+
+  protected void initAttributes() {
+    this.needClientAuth = false;
+  }
+
+  @Override
+  public ByteChannel wrapChannel(SocketChannel channel, SelectionKey key)
+      throws IOException {
+
+    SSLEngine sslEngine = sslcontext.createSSLEngine();
+    sslEngine.setUseClientMode(false);
+    sslEngine.setNeedClientAuth(needClientAuth);
+    return new SSLSocketChannel2(channel, sslEngine, exec, key);
+  }
+
+  public boolean getNeedClientAuth() {
+    return needClientAuth;
+  }
+
+  public void setNeedClientAuth(boolean needClientAuth) {
+    this.needClientAuth = needClientAuth;
+  }
+}
+
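
A minimal wiring sketch (illustrative, not part of the committed sources) that mirrors setupNotebookServer() in ZeppelinServer above; building a keystore-backed SSLContext is elided and the JVM default context stands in as a placeholder, with a made-up port.

    import javax.net.ssl.SSLContext;

    import org.apache.zeppelin.socket.NotebookServer;
    import org.apache.zeppelin.socket.SslWebSocketServerFactory;

    public class SslWebSocketSketch {
      public static void main(String[] args) throws Exception {
        SSLContext sslContext = SSLContext.getDefault();  // placeholder; see getSslContext() above

        SslWebSocketServerFactory wsf = new SslWebSocketServerFactory(sslContext);
        wsf.setNeedClientAuth(true);  // demand a client certificate during the handshake

        NotebookServer server = new NotebookServer(8443);  // illustrative port
        server.setWebSocketFactory(wsf);
        server.start();
      }
    }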


[16/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/SparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/com/nflabs/zeppelin/spark/SparkInterpreter.java
deleted file mode 100644
index 89c6e45..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/SparkInterpreter.java
+++ /dev/null
@@ -1,718 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.PrintStream;
-import java.io.PrintWriter;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-import com.nflabs.zeppelin.interpreter.*;
-import org.apache.spark.HttpServer;
-import org.apache.spark.SparkConf;
-import org.apache.spark.SparkContext;
-import org.apache.spark.SparkEnv;
-import org.apache.spark.repl.SparkCommandLine;
-import org.apache.spark.repl.SparkILoop;
-import org.apache.spark.repl.SparkIMain;
-import org.apache.spark.repl.SparkJLineCompletion;
-import org.apache.spark.scheduler.ActiveJob;
-import org.apache.spark.scheduler.DAGScheduler;
-import org.apache.spark.scheduler.Pool;
-import org.apache.spark.scheduler.Stage;
-import org.apache.spark.sql.SQLContext;
-import org.apache.spark.ui.jobs.JobProgressListener;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import scala.Console;
-import scala.Enumeration.Value;
-import scala.None;
-import scala.Some;
-import scala.Tuple2;
-import scala.collection.Iterator;
-import scala.collection.JavaConversions;
-import scala.collection.JavaConverters;
-import scala.collection.mutable.HashMap;
-import scala.collection.mutable.HashSet;
-import scala.tools.nsc.Settings;
-import scala.tools.nsc.interpreter.Completion.Candidates;
-import scala.tools.nsc.interpreter.Completion.ScalaCompleter;
-import scala.tools.nsc.settings.MutableSettings.BooleanSetting;
-import scala.tools.nsc.settings.MutableSettings.PathSetting;
-
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-import com.nflabs.zeppelin.spark.dep.DependencyContext;
-import com.nflabs.zeppelin.spark.dep.DependencyResolver;
-
-/**
- * Spark interpreter for Zeppelin.
- *
- */
-public class SparkInterpreter extends Interpreter {
-  Logger logger = LoggerFactory.getLogger(SparkInterpreter.class);
-
-  static {
-    Interpreter.register(
-        "spark",
-        "spark",
-        SparkInterpreter.class.getName(),
-        new InterpreterPropertyBuilder()
-            .add("spark.app.name", "Zeppelin", "The name of spark application.")
-            .add("master",
-                getSystemDefault("MASTER", "spark.master", "local[*]"),
-                "Spark master uri. ex) spark://masterhost:7077")
-            .add("spark.executor.memory",
-                getSystemDefault(null, "spark.executor.memory", "512m"),
-                "Executor memory per worker instance. ex) 512m, 32g")
-            .add("spark.cores.max",
-                getSystemDefault(null, "spark.cores.max", ""),
-                "Total number of cores to use. Empty value uses all available core.")
-            .add("spark.yarn.jar",
-                getSystemDefault("SPARK_YARN_JAR", "spark.yarn.jar", ""),
-                "The location of the Spark jar file. If you use yarn as a cluster, "
-                + "we should set this value")
-            .add("zeppelin.spark.useHiveContext", "true",
-                 "Use HiveContext instead of SQLContext if it is true.")
-            .add("args", "", "spark commandline args").build());
-
-  }
-
-  private ZeppelinContext z;
-  private SparkILoop interpreter;
-  private SparkIMain intp;
-  private SparkContext sc;
-  private ByteArrayOutputStream out;
-  private SQLContext sqlc;
-  private DependencyResolver dep;
-  private SparkJLineCompletion completor;
-
-  private JobProgressListener sparkListener;
-
-  private Map<String, Object> binder;
-  private SparkEnv env;
-
-
-  public SparkInterpreter(Properties property) {
-    super(property);
-    out = new ByteArrayOutputStream();
-  }
-
-  public SparkInterpreter(Properties property, SparkContext sc) {
-    this(property);
-
-    this.sc = sc;
-    env = SparkEnv.get();
-    sparkListener = setupListeners(this.sc);
-  }
-
-  public synchronized SparkContext getSparkContext() {
-    if (sc == null) {
-      sc = createSparkContext();
-      env = SparkEnv.get();
-      sparkListener = setupListeners(sc);
-    }
-    return sc;
-  }
-
-  public boolean isSparkContextInitialized() {
-    return sc != null;
-  }
-
-  private static JobProgressListener setupListeners(SparkContext context) {
-    JobProgressListener pl = new JobProgressListener(context.getConf());
-    context.listenerBus().addListener(pl);
-    return pl;
-  }
-
-  private boolean useHiveContext() {
-    return Boolean.parseBoolean(getProperty("zeppelin.spark.useHiveContext"));
-  }
-
-  public SQLContext getSQLContext() {
-    if (sqlc == null) {
-      if (useHiveContext()) {
-        String name = "org.apache.spark.sql.hive.HiveContext";
-        Constructor<?> hc;
-        try {
-          hc = getClass().getClassLoader().loadClass(name)
-              .getConstructor(SparkContext.class);
-          sqlc = (SQLContext) hc.newInstance(getSparkContext());
-        } catch (NoSuchMethodException | SecurityException
-            | ClassNotFoundException | InstantiationException
-            | IllegalAccessException | IllegalArgumentException
-            | InvocationTargetException e) {
-          logger.warn("Can't create HiveContext. Fallback to SQLContext", e);
-          // when hive dependency is not loaded, it'll fail.
-          // in this case SQLContext can be used.
-          sqlc = new SQLContext(getSparkContext());
-        }
-      } else {
-        sqlc = new SQLContext(getSparkContext());
-      }
-    }
-
-    return sqlc;
-  }
-
-  public DependencyResolver getDependencyResolver() {
-    if (dep == null) {
-      dep = new DependencyResolver(intp, sc, getProperty("zeppelin.dep.localrepo"));
-    }
-    return dep;
-  }
-
-  private DepInterpreter getDepInterpreter() {
-    InterpreterGroup intpGroup = getInterpreterGroup();
-    if (intpGroup == null) return null;
-    synchronized (intpGroup) {
-      for (Interpreter intp : intpGroup) {
-        if (intp.getClassName().equals(DepInterpreter.class.getName())) {
-          Interpreter p = intp;
-          while (p instanceof WrappedInterpreter) {
-            p = ((WrappedInterpreter) p).getInnerInterpreter();
-          }
-          return (DepInterpreter) p;
-        }
-      }
-    }
-    return null;
-  }
-
-  public SparkContext createSparkContext() {
-    System.err.println("------ Create new SparkContext " + getProperty("master") + " -------");
-
-    String execUri = System.getenv("SPARK_EXECUTOR_URI");
-    String[] jars = SparkILoop.getAddedJars();
-
-    String classServerUri = null;
-
-    try { // in case of spark 1.1x, spark 1.2x
-      Method classServer = interpreter.intp().getClass().getMethod("classServer");
-      HttpServer httpServer = (HttpServer) classServer.invoke(interpreter.intp());
-      classServerUri = httpServer.uri();
-    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
-        | IllegalArgumentException | InvocationTargetException e) {
-      // continue
-    }
-
-    if (classServerUri == null) {
-      try { // for spark 1.3x
-        Method classServer = interpreter.intp().getClass().getMethod("classServerUri");
-        classServerUri = (String) classServer.invoke(interpreter.intp());
-      } catch (NoSuchMethodException | SecurityException | IllegalAccessException
-          | IllegalArgumentException | InvocationTargetException e) {
-        throw new InterpreterException(e);
-      }
-    }
-
-    SparkConf conf =
-        new SparkConf()
-            .setMaster(getProperty("master"))
-            .setAppName(getProperty("spark.app.name"))
-            .setJars(jars)
-            .set("spark.repl.class.uri", classServerUri);
-
-    if (execUri != null) {
-      conf.set("spark.executor.uri", execUri);
-    }
-    if (System.getenv("SPARK_HOME") != null) {
-      conf.setSparkHome(System.getenv("SPARK_HOME"));
-    }
-    conf.set("spark.scheduler.mode", "FAIR");
-
-    Properties intpProperty = getProperty();
-
-    for (Object k : intpProperty.keySet()) {
-      String key = (String) k;
-      Object value = intpProperty.get(key);
-      if (!isEmptyString(value)) {
-        logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, value));
-        conf.set(key, (String) value);
-      }
-    }
-
-    SparkContext sparkContext = new SparkContext(conf);
-    return sparkContext;
-  }
-
-  public static boolean isEmptyString(Object val) {
-    return val instanceof String && ((String) val).trim().isEmpty();
-  }
-
-  public static String getSystemDefault(
-      String envName,
-      String propertyName,
-      String defaultValue) {
-
-    if (envName != null && !envName.isEmpty()) {
-      String envValue = System.getenv().get(envName);
-      if (envValue != null) {
-        return envValue;
-      }
-    }
-
-    if (propertyName != null && !propertyName.isEmpty()) {
-      String propValue = System.getProperty(propertyName);
-      if (propValue != null) {
-        return propValue;
-      }
-    }
-    return defaultValue;
-  }
-
-  @Override
-  public void open() {
-    URL[] urls = getClassloaderUrls();
-
-    // A helpful discussion of how the scala compiler handles the classpath:
-    // https://groups.google.com/forum/#!topic/scala-user/MlVwo2xCCI0
-
-    /*
-     * > val env = new nsc.Settings(errLogger)
-     * > env.usejavacp.value = true
-     * > val p = new Interpreter(env)
-     * > p.setContextClassLoader
-     * > Alternatively you can set the class path through nsc.Settings.classpath.
-     *
-     * >> val settings = new Settings()
-     * >> settings.usejavacp.value = true
-     * >> settings.classpath.value += File.pathSeparator + System.getProperty("java.class.path")
-     * >> val in = new Interpreter(settings) {
-     * >>   override protected def parentClassLoader = getClass.getClassLoader
-     * >> }
-     * >> in.setContextClassLoader()
-     */
-    Settings settings = new Settings();
-    if (getProperty("args") != null) {
-      String[] argsArray = getProperty("args").split(" ");
-      LinkedList<String> argList = new LinkedList<String>();
-      for (String arg : argsArray) {
-        argList.add(arg);
-      }
-
-      SparkCommandLine command =
-          new SparkCommandLine(scala.collection.JavaConversions.asScalaBuffer(
-              argList).toList());
-      settings = command.settings();
-    }
-
-    // set classpath for scala compiler
-    PathSetting pathSettings = settings.classpath();
-    String classpath = "";
-    List<File> paths = currentClassPath();
-    for (File f : paths) {
-      if (classpath.length() > 0) {
-        classpath += File.pathSeparator;
-      }
-      classpath += f.getAbsolutePath();
-    }
-
-    if (urls != null) {
-      for (URL u : urls) {
-        if (classpath.length() > 0) {
-          classpath += File.pathSeparator;
-        }
-        classpath += u.getFile();
-      }
-    }
-
-    // add dependency from DepInterpreter
-    DepInterpreter depInterpreter = getDepInterpreter();
-    if (depInterpreter != null) {
-      DependencyContext depc = depInterpreter.getDependencyContext();
-      if (depc != null) {
-        List<File> files = depc.getFiles();
-        if (files != null) {
-          for (File f : files) {
-            if (classpath.length() > 0) {
-              classpath += File.pathSeparator;
-            }
-            classpath += f.getAbsolutePath();
-          }
-        }
-      }
-    }
-
-    pathSettings.v_$eq(classpath);
-    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);
-
-
-    // set classloader for scala compiler
-    settings.explicitParentLoader_$eq(new Some<ClassLoader>(Thread.currentThread()
-        .getContextClassLoader()));
-    BooleanSetting b = (BooleanSetting) settings.usejavacp();
-    b.v_$eq(true);
-    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);
-
-    PrintStream printStream = new PrintStream(out);
-
-    /* spark interpreter */
-    this.interpreter = new SparkILoop(null, new PrintWriter(out));
-    interpreter.settings_$eq(settings);
-
-    interpreter.createInterpreter();
-
-    intp = interpreter.intp();
-    intp.setContextClassLoader();
-    intp.initializeSynchronous();
-
-    completor = new SparkJLineCompletion(intp);
-
-    sc = getSparkContext();
-    if (sc.getPoolForName("fair").isEmpty()) {
-      Value schedulingMode = org.apache.spark.scheduler.SchedulingMode.FAIR();
-      int minimumShare = 0;
-      int weight = 1;
-      Pool pool = new Pool("fair", schedulingMode, minimumShare, weight);
-      sc.taskScheduler().rootPool().addSchedulable(pool);
-    }
-
-    sqlc = getSQLContext();
-
-    dep = getDependencyResolver();
-
-    z = new ZeppelinContext(sc, sqlc, null, dep, printStream);
-
-    try {
-      if (sc.version().startsWith("1.1") || sc.version().startsWith("1.2")) {
-        Method loadFiles = this.interpreter.getClass().getMethod("loadFiles", Settings.class);
-        loadFiles.invoke(this.interpreter, settings);
-      } else if (sc.version().startsWith("1.3")) {
-        Method loadFiles = this.interpreter.getClass().getMethod(
-            "org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
-        loadFiles.invoke(this.interpreter, settings);
-      }
-    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
-        | IllegalArgumentException | InvocationTargetException e) {
-      throw new InterpreterException(e);
-    }
-
-
-    intp.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
-    binder = (Map<String, Object>) getValue("_binder");
-    binder.put("sc", sc);
-    binder.put("sqlc", sqlc);
-    binder.put("z", z);
-    binder.put("out", printStream);
-
-    intp.interpret("@transient val z = "
-                 + "_binder.get(\"z\").asInstanceOf[com.nflabs.zeppelin.spark.ZeppelinContext]");
-    intp.interpret("@transient val sc = "
-                 + "_binder.get(\"sc\").asInstanceOf[org.apache.spark.SparkContext]");
-    intp.interpret("@transient val sqlc = "
-                 + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
-    intp.interpret("@transient val sqlContext = "
-                 + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
-    intp.interpret("import org.apache.spark.SparkContext._");
-
-    if (sc.version().startsWith("1.1")) {
-      intp.interpret("import sqlContext._");
-    } else if (sc.version().startsWith("1.2")) {
-      intp.interpret("import sqlContext._");
-    } else if (sc.version().startsWith("1.3")) {
-      intp.interpret("import sqlContext.implicits._");
-      intp.interpret("import sqlContext.sql");
-      intp.interpret("import org.apache.spark.sql.functions._");
-    }
-
-    // add jar
-    if (depInterpreter != null) {
-      DependencyContext depc = depInterpreter.getDependencyContext();
-      if (depc != null) {
-        List<File> files = depc.getFilesDist();
-        if (files != null) {
-          for (File f : files) {
-            if (f.getName().toLowerCase().endsWith(".jar")) {
-              sc.addJar(f.getAbsolutePath());
-              logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
-            } else {
-              sc.addFile(f.getAbsolutePath());
-              logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
-            }
-          }
-        }
-      }
-    }
-  }
-
-  private List<File> currentClassPath() {
-    List<File> paths = classPath(Thread.currentThread().getContextClassLoader());
-    String[] cps = System.getProperty("java.class.path").split(File.pathSeparator);
-    if (cps != null) {
-      for (String cp : cps) {
-        paths.add(new File(cp));
-      }
-    }
-    return paths;
-  }
-
-  private List<File> classPath(ClassLoader cl) {
-    List<File> paths = new LinkedList<File>();
-    if (cl == null) {
-      return paths;
-    }
-
-    if (cl instanceof URLClassLoader) {
-      URLClassLoader ucl = (URLClassLoader) cl;
-      URL[] urls = ucl.getURLs();
-      if (urls != null) {
-        for (URL url : urls) {
-          paths.add(new File(url.getFile()));
-        }
-      }
-    }
-    return paths;
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    ScalaCompleter c = completor.completer();
-    Candidates ret = c.complete(buf, cursor);
-    return scala.collection.JavaConversions.asJavaList(ret.candidates());
-  }
-
-  public Object getValue(String name) {
-    Object ret = intp.valueOfTerm(name);
-    if (ret instanceof None) {
-      return null;
-    } else if (ret instanceof Some) {
-      return ((Some) ret).get();
-    } else {
-      return ret;
-    }
-  }
-
-  String getJobGroup(InterpreterContext context){
-    return "zeppelin-" + this.hashCode() + "-" + context.getParagraphId();
-  }
-
-  /**
-   * Interpret a single line.
-   */
-  @Override
-  public InterpreterResult interpret(String line, InterpreterContext context) {
-    z.setInterpreterContext(context);
-    if (line == null || line.trim().length() == 0) {
-      return new InterpreterResult(Code.SUCCESS);
-    }
-    return interpret(line.split("\n"), context);
-  }
-
-  public InterpreterResult interpret(String[] lines, InterpreterContext context) {
-    synchronized (this) {
-      z.setGui(context.getGui());
-      sc.setJobGroup(getJobGroup(context), "Zeppelin", false);
-      InterpreterResult r = interpretInput(lines);
-      sc.clearJobGroup();
-      return r;
-    }
-  }
-
-  public InterpreterResult interpretInput(String[] lines) {
-    SparkEnv.set(env);
-
-    // add print("") to make sure not finishing with comment
-    // see https://github.com/NFLabs/zeppelin/issues/151
-    String[] linesToRun = new String[lines.length + 1];
-    for (int i = 0; i < lines.length; i++) {
-      linesToRun[i] = lines[i];
-    }
-    linesToRun[lines.length] = "print(\"\")";
-
-    Console.setOut((java.io.PrintStream) binder.get("out"));
-    out.reset();
-    Code r = null;
-    String incomplete = "";
-    for (String s : linesToRun) {
-      scala.tools.nsc.interpreter.Results.Result res = null;
-      try {
-        res = intp.interpret(incomplete + s);
-      } catch (Exception e) {
-        sc.clearJobGroup();
-        logger.info("Interpreter exception", e);
-        return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
-      }
-
-      r = getResultCode(res);
-
-      if (r == Code.ERROR) {
-        sc.clearJobGroup();
-        return new InterpreterResult(r, out.toString());
-      } else if (r == Code.INCOMPLETE) {
-        incomplete += s + "\n";
-      } else {
-        incomplete = "";
-      }
-    }
-
-    if (r == Code.INCOMPLETE) {
-      return new InterpreterResult(r, "Incomplete expression");
-    } else {
-      return new InterpreterResult(r, out.toString());
-    }
-  }
-
-
-  @Override
-  public void cancel(InterpreterContext context) {
-    sc.cancelJobGroup(getJobGroup(context));
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    String jobGroup = getJobGroup(context);
-    int completedTasks = 0;
-    int totalTasks = 0;
-
-    DAGScheduler scheduler = sc.dagScheduler();
-    if (scheduler == null) {
-      return 0;
-    }
-    HashSet<ActiveJob> jobs = scheduler.activeJobs();
-    if (jobs == null || jobs.size() == 0) {
-      return 0;
-    }
-    Iterator<ActiveJob> it = jobs.iterator();
-    while (it.hasNext()) {
-      ActiveJob job = it.next();
-      String g = (String) job.properties().get("spark.jobGroup.id");
-
-      if (jobGroup.equals(g)) {
-        int[] progressInfo = null;
-        if (sc.version().startsWith("1.0")) {
-          progressInfo = getProgressFromStage_1_0x(sparkListener, job.finalStage());
-        } else if (sc.version().startsWith("1.1")) {
-          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
-        } else if (sc.version().startsWith("1.2")) {
-          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
-        } else if (sc.version().startsWith("1.3")) {
-          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
-        } else {
-          continue;
-        }
-        totalTasks += progressInfo[0];
-        completedTasks += progressInfo[1];
-      }
-    }
-
-    if (totalTasks == 0) {
-      return 0;
-    }
-    return completedTasks * 100 / totalTasks;
-  }
-
-  private int[] getProgressFromStage_1_0x(JobProgressListener sparkListener, Stage stage) {
-    int numTasks = stage.numTasks();
-    int completedTasks = 0;
-
-    Method method;
-    Object completedTaskInfo = null;
-    try {
-      method = sparkListener.getClass().getMethod("stageIdToTasksComplete");
-      completedTaskInfo =
-          JavaConversions.asJavaMap((HashMap<Object, Object>) method.invoke(sparkListener)).get(
-              stage.id());
-    } catch (NoSuchMethodException | SecurityException e) {
-      logger.error("Error while getting progress", e);
-    } catch (IllegalAccessException e) {
-      logger.error("Error while getting progress", e);
-    } catch (IllegalArgumentException e) {
-      logger.error("Error while getting progress", e);
-    } catch (InvocationTargetException e) {
-      logger.error("Error while getting progress", e);
-    }
-
-    if (completedTaskInfo != null) {
-      completedTasks += (int) completedTaskInfo;
-    }
-    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
-    if (parents != null) {
-      for (Stage s : parents) {
-        int[] p = getProgressFromStage_1_0x(sparkListener, s);
-        numTasks += p[0];
-        completedTasks += p[1];
-      }
-    }
-
-    return new int[] {numTasks, completedTasks};
-  }
-
-  private int[] getProgressFromStage_1_1x(JobProgressListener sparkListener, Stage stage) {
-    int numTasks = stage.numTasks();
-    int completedTasks = 0;
-
-    try {
-      Method stageIdToData = sparkListener.getClass().getMethod("stageIdToData");
-      HashMap<Tuple2<Object, Object>, Object> stageIdData =
-          (HashMap<Tuple2<Object, Object>, Object>) stageIdToData.invoke(sparkListener);
-      Class<?> stageUIDataClass =
-          this.getClass().forName("org.apache.spark.ui.jobs.UIData$StageUIData");
-
-      Method numCompletedTasks = stageUIDataClass.getMethod("numCompleteTasks");
-
-      Set<Tuple2<Object, Object>> keys =
-          JavaConverters.asJavaSetConverter(stageIdData.keySet()).asJava();
-      for (Tuple2<Object, Object> k : keys) {
-        if (stage.id() == (int) k._1()) {
-          Object uiData = stageIdData.get(k).get();
-          completedTasks += (int) numCompletedTasks.invoke(uiData);
-        }
-      }
-    } catch (Exception e) {
-      logger.error("Error on getting progress information", e);
-    }
-
-    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
-    if (parents != null) {
-      for (Stage s : parents) {
-        int[] p = getProgressFromStage_1_1x(sparkListener, s);
-        numTasks += p[0];
-        completedTasks += p[1];
-      }
-    }
-    return new int[] {numTasks, completedTasks};
-  }
-
-  private Code getResultCode(scala.tools.nsc.interpreter.Results.Result r) {
-    if (r instanceof scala.tools.nsc.interpreter.Results.Success$) {
-      return Code.SUCCESS;
-    } else if (r instanceof scala.tools.nsc.interpreter.Results.Incomplete$) {
-      return Code.INCOMPLETE;
-    } else {
-      return Code.ERROR;
-    }
-  }
-
-  @Override
-  public void close() {
-    sc.stop();
-    sc = null;
-
-    intp.close();
-  }
-
-  @Override
-  public FormType getFormType() {
-    return FormType.NATIVE;
-  }
-
-  public JobProgressListener getJobProgressListener() {
-    return sparkListener;
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    return SchedulerFactory.singleton().createOrGetFIFOScheduler(
-      SparkInterpreter.class.getName() + this.hashCode());
-  }
-
-  public ZeppelinContext getZeppelinContext() {
-    return z;
-  }
-}
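A note on the progress math in getProgress() above: for each active job in the matching job group, the listener/DAG data is reduced to a (totalTasks, completedTasks) pair summed over the final stage and all of its parent stages, and the paragraph progress is completedTasks * 100 / totalTasks. A minimal standalone sketch of that aggregation, using a hypothetical StageInfo class in place of Spark's Stage and JobProgressListener data:

    import java.util.ArrayList;
    import java.util.List;

    public class ProgressSketch {
      // Hypothetical stand-in for a Spark stage plus its per-stage listener counters.
      static class StageInfo {
        int numTasks;
        int completedTasks;
        List<StageInfo> parents = new ArrayList<StageInfo>();
      }

      // Sum (total, completed) over a stage and all of its ancestors,
      // mirroring what getProgressFromStage_1_0x/_1_1x above do via reflection.
      static int[] aggregate(StageInfo stage) {
        int total = stage.numTasks;
        int completed = stage.completedTasks;
        for (StageInfo parent : stage.parents) {
          int[] p = aggregate(parent);
          total += p[0];
          completed += p[1];
        }
        return new int[] {total, completed};
      }

      static int progressPercent(StageInfo finalStage) {
        int[] agg = aggregate(finalStage);
        return agg[0] == 0 ? 0 : agg[1] * 100 / agg[0];
      }
    }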

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/SparkSqlInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/SparkSqlInterpreter.java b/spark/src/main/java/com/nflabs/zeppelin/spark/SparkSqlInterpreter.java
deleted file mode 100644
index 98947eb..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/SparkSqlInterpreter.java
+++ /dev/null
@@ -1,339 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import com.nflabs.zeppelin.interpreter.*;
-import org.apache.spark.SparkContext;
-import org.apache.spark.scheduler.ActiveJob;
-import org.apache.spark.scheduler.DAGScheduler;
-import org.apache.spark.scheduler.Stage;
-import org.apache.spark.sql.SQLContext;
-import org.apache.spark.sql.SQLContext.QueryExecution;
-import org.apache.spark.sql.catalyst.expressions.Attribute;
-import org.apache.spark.ui.jobs.JobProgressListener;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import scala.Tuple2;
-import scala.collection.Iterator;
-import scala.collection.JavaConversions;
-import scala.collection.JavaConverters;
-import scala.collection.mutable.HashMap;
-import scala.collection.mutable.HashSet;
-
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-/**
- * Spark SQL interpreter for Zeppelin.
- *
- * @author Leemoonsoo
- *
- */
-public class SparkSqlInterpreter extends Interpreter {
-  Logger logger = LoggerFactory.getLogger(SparkSqlInterpreter.class);
-  AtomicInteger num = new AtomicInteger(0);
-
-  static {
-    Interpreter.register(
-        "sql",
-        "spark",
-        SparkSqlInterpreter.class.getName(),
-        new InterpreterPropertyBuilder()
-            .add("zeppelin.spark.maxResult", "10000", "Max number of SparkSQL result to display.")
-            .add("zeppelin.spark.concurrentSQL", "false",
-                "Execute multiple SQL concurrently if set true.")
-            .build());
-  }
-
-  private String getJobGroup(InterpreterContext context){
-    return "zeppelin-" + this.hashCode() + "-" + context.getParagraphId();
-  }
-
-  private int maxResult;
-
-  public SparkSqlInterpreter(Properties property) {
-    super(property);
-  }
-
-  @Override
-  public void open() {
-    this.maxResult = Integer.parseInt(getProperty("zeppelin.spark.maxResult"));
-  }
-
-  private SparkInterpreter getSparkInterpreter() {
-    for (Interpreter intp : getInterpreterGroup()) {
-      if (intp.getClassName().equals(SparkInterpreter.class.getName())) {
-        Interpreter p = intp;
-        while (p instanceof WrappedInterpreter) {
-          if (p instanceof LazyOpenInterpreter) {
-            p.open();
-          }
-          p = ((WrappedInterpreter) p).getInnerInterpreter();
-        }
-        return (SparkInterpreter) p;
-      }
-    }
-    return null;
-  }
-
-  public boolean concurrentSQL() {
-    return Boolean.parseBoolean(getProperty("zeppelin.spark.concurrentSQL"));
-  }
-
-  @Override
-  public void close() {}
-
-  @Override
-  public InterpreterResult interpret(String st, InterpreterContext context) {
-    SQLContext sqlc = null;
-
-    sqlc = getSparkInterpreter().getSQLContext();
-
-    SparkContext sc = sqlc.sparkContext();
-    if (concurrentSQL()) {
-      sc.setLocalProperty("spark.scheduler.pool", "fair");
-    } else {
-      sc.setLocalProperty("spark.scheduler.pool", null);
-    }
-
-    sc.setJobGroup(getJobGroup(context), "Zeppelin", false);
-
-    // SchemaRDD - spark 1.1, 1.2, DataFrame - spark 1.3
-    Object rdd;
-    Object[] rows = null;
-    try {
-      rdd = sqlc.sql(st);
-
-      Method take = rdd.getClass().getMethod("take", int.class);
-      rows = (Object[]) take.invoke(rdd, maxResult + 1);
-    } catch (Exception e) {
-      logger.error("Error", e);
-      sc.clearJobGroup();
-      return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
-    }
-
-    String msg = null;
-
-    // get field names
-    Method queryExecution;
-    QueryExecution qe;
-    try {
-      queryExecution = rdd.getClass().getMethod("queryExecution");
-      qe = (QueryExecution) queryExecution.invoke(rdd);
-    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
-        | IllegalArgumentException | InvocationTargetException e) {
-      throw new InterpreterException(e);
-    }
-
-    List<Attribute> columns =
-        scala.collection.JavaConverters.asJavaListConverter(
-            qe.analyzed().output()).asJava();
-
-    for (Attribute col : columns) {
-      if (msg == null) {
-        msg = col.name();
-      } else {
-        msg += "\t" + col.name();
-      }
-    }
-
-    msg += "\n";
-
-    // ArrayType, BinaryType, BooleanType, ByteType, DecimalType, DoubleType, DynamicType,
-    // FloatType, FractionalType, IntegerType, IntegralType, LongType, MapType, NativeType,
-    // NullType, NumericType, ShortType, StringType, StructType
-
-    try {
-      for (int r = 0; r < maxResult && r < rows.length; r++) {
-        Object row = rows[r];
-        Method isNullAt = row.getClass().getMethod("isNullAt", int.class);
-        Method apply = row.getClass().getMethod("apply", int.class);
-
-        for (int i = 0; i < columns.size(); i++) {
-          if (!(Boolean) isNullAt.invoke(row, i)) {
-            msg += apply.invoke(row, i).toString();
-          } else {
-            msg += "null";
-          }
-          if (i != columns.size() - 1) {
-            msg += "\t";
-          }
-        }
-        msg += "\n";
-      }
-    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
-        | IllegalArgumentException | InvocationTargetException e) {
-      throw new InterpreterException(e);
-    }
-
-    if (rows.length > maxResult) {
-      msg += "\n<font color=red>Results are limited by " + maxResult + ".</font>";
-    }
-    InterpreterResult rett = new InterpreterResult(Code.SUCCESS, "%table " + msg);
-    sc.clearJobGroup();
-    return rett;
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {
-    SQLContext sqlc = getSparkInterpreter().getSQLContext();
-    SparkContext sc = sqlc.sparkContext();
-
-    sc.cancelJobGroup(getJobGroup(context));
-  }
-
-  @Override
-  public FormType getFormType() {
-    return FormType.SIMPLE;
-  }
-
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    String jobGroup = getJobGroup(context);
-    SQLContext sqlc = getSparkInterpreter().getSQLContext();
-    SparkContext sc = sqlc.sparkContext();
-    JobProgressListener sparkListener = getSparkInterpreter().getJobProgressListener();
-    int completedTasks = 0;
-    int totalTasks = 0;
-
-    DAGScheduler scheduler = sc.dagScheduler();
-    HashSet<ActiveJob> jobs = scheduler.activeJobs();
-    Iterator<ActiveJob> it = jobs.iterator();
-    while (it.hasNext()) {
-      ActiveJob job = it.next();
-      String g = (String) job.properties().get("spark.jobGroup.id");
-      if (jobGroup.equals(g)) {
-        int[] progressInfo = null;
-        if (sc.version().startsWith("1.0")) {
-          progressInfo = getProgressFromStage_1_0x(sparkListener, job.finalStage());
-        } else if (sc.version().startsWith("1.1")) {
-          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
-        } else if (sc.version().startsWith("1.2")) {
-          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
-        } else if (sc.version().startsWith("1.3")) {
-          progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
-        } else {
-          logger.warn("Spark {} getting progress information not supported" + sc.version());
-          continue;
-        }
-        totalTasks += progressInfo[0];
-        completedTasks += progressInfo[1];
-      }
-    }
-
-    if (totalTasks == 0) {
-      return 0;
-    }
-    return completedTasks * 100 / totalTasks;
-  }
-
-  private int[] getProgressFromStage_1_0x(JobProgressListener sparkListener, Stage stage) {
-    int numTasks = stage.numTasks();
-    int completedTasks = 0;
-
-    Method method;
-    Object completedTaskInfo = null;
-    try {
-      method = sparkListener.getClass().getMethod("stageIdToTasksComplete");
-      completedTaskInfo =
-          JavaConversions.asJavaMap((HashMap<Object, Object>) method.invoke(sparkListener)).get(
-              stage.id());
-    } catch (NoSuchMethodException | SecurityException e) {
-      logger.error("Error while getting progress", e);
-    } catch (IllegalAccessException e) {
-      logger.error("Error while getting progress", e);
-    } catch (IllegalArgumentException e) {
-      logger.error("Error while getting progress", e);
-    } catch (InvocationTargetException e) {
-      logger.error("Error while getting progress", e);
-    }
-
-    if (completedTaskInfo != null) {
-      completedTasks += (int) completedTaskInfo;
-    }
-    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
-    if (parents != null) {
-      for (Stage s : parents) {
-        int[] p = getProgressFromStage_1_0x(sparkListener, s);
-        numTasks += p[0];
-        completedTasks += p[1];
-      }
-    }
-
-    return new int[] {numTasks, completedTasks};
-  }
-
-  private int[] getProgressFromStage_1_1x(JobProgressListener sparkListener, Stage stage) {
-    int numTasks = stage.numTasks();
-    int completedTasks = 0;
-
-    try {
-      Method stageIdToData = sparkListener.getClass().getMethod("stageIdToData");
-      HashMap<Tuple2<Object, Object>, Object> stageIdData =
-          (HashMap<Tuple2<Object, Object>, Object>) stageIdToData.invoke(sparkListener);
-      Class<?> stageUIDataClass =
-          this.getClass().forName("org.apache.spark.ui.jobs.UIData$StageUIData");
-
-      Method numCompletedTasks = stageUIDataClass.getMethod("numCompleteTasks");
-
-      Set<Tuple2<Object, Object>> keys =
-          JavaConverters.asJavaSetConverter(stageIdData.keySet()).asJava();
-      for (Tuple2<Object, Object> k : keys) {
-        if (stage.id() == (int) k._1()) {
-          Object uiData = stageIdData.get(k).get();
-          completedTasks += (int) numCompletedTasks.invoke(uiData);
-        }
-      }
-    } catch (Exception e) {
-      logger.error("Error on getting progress information", e);
-    }
-
-    List<Stage> parents = JavaConversions.asJavaList(stage.parents());
-    if (parents != null) {
-      for (Stage s : parents) {
-        int[] p = getProgressFromStage_1_1x(sparkListener, s);
-        numTasks += p[0];
-        completedTasks += p[1];
-      }
-    }
-    return new int[] {numTasks, completedTasks};
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    if (concurrentSQL()) {
-      int maxConcurrency = 10;
-      return SchedulerFactory.singleton().createOrGetParallelScheduler(
-          SparkSqlInterpreter.class.getName() + this.hashCode(), maxConcurrency);
-    } else {
-      // getSparkInterpreter() calls open() internally.
-      // That means if SparkInterpreter is not opened yet, this call blocks until it is open.
-      // While it blocks, the UI displays 'READY' or 'FINISHED' instead of 'PENDING' or 'RUNNING',
-      // because the scheduler is not created yet; it is created by this method.
-      // Therefore getSparkInterpreter() would still work here, but it is better and safer
-      // to get the SparkInterpreter without opening it.
-      for (Interpreter intp : getInterpreterGroup()) {
-        if (intp.getClassName().equals(SparkInterpreter.class.getName())) {
-          Interpreter p = intp;
-          return p.getScheduler();
-        } else {
-          continue;
-        }
-      }
-      throw new InterpreterException("Can't find SparkInterpreter");
-    }
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    return null;
-  }
-}
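For readers unfamiliar with the output produced by interpret() above: the result is rendered by Zeppelin's %table display as a tab-separated header row followed by tab-separated value rows, truncated at zeppelin.spark.maxResult. A minimal sketch of that formatting step with plain Java collections (the reflection over SchemaRDD/DataFrame rows is left out; class and variable names are illustrative only):

    import java.util.Arrays;
    import java.util.List;

    public class TableFormatSketch {
      // Build the "%table" payload: tab-separated header, newline-separated rows,
      // nulls rendered as "null", output truncated at maxResult rows.
      public static String toTable(List<String> columns, List<List<Object>> rows, int maxResult) {
        StringBuilder msg = new StringBuilder(String.join("\t", columns)).append("\n");
        for (int r = 0; r < maxResult && r < rows.size(); r++) {
          List<Object> row = rows.get(r);
          for (int i = 0; i < columns.size(); i++) {
            Object v = row.get(i);
            msg.append(v == null ? "null" : v.toString());
            if (i != columns.size() - 1) {
              msg.append("\t");
            }
          }
          msg.append("\n");
        }
        if (rows.size() > maxResult) {
          msg.append("\n<font color=red>Results are limited to ").append(maxResult).append(".</font>");
        }
        return "%table " + msg;
      }

      public static void main(String[] args) {
        List<List<Object>> rows = Arrays.asList(
            Arrays.<Object>asList("alice", 30),
            Arrays.<Object>asList("bob", null));
        System.out.println(toTable(Arrays.asList("name", "age"), rows, 10000));
      }
    }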

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/ZeppelinContext.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/ZeppelinContext.java b/spark/src/main/java/com/nflabs/zeppelin/spark/ZeppelinContext.java
deleted file mode 100644
index 30f6015..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/ZeppelinContext.java
+++ /dev/null
@@ -1,238 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import static scala.collection.JavaConversions.asJavaCollection;
-import static scala.collection.JavaConversions.asJavaIterable;
-import static scala.collection.JavaConversions.collectionAsScalaIterable;
-
-import java.io.PrintStream;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-
-import org.apache.spark.SparkContext;
-import org.apache.spark.sql.SQLContext;
-import org.apache.spark.sql.hive.HiveContext;
-
-import scala.Tuple2;
-import scala.collection.Iterable;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.display.Input.ParamOption;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.spark.dep.DependencyResolver;
-
-/**
- * Zeppelin context for the Spark interpreter, exposed to notebook paragraphs as the variable "z".
- *
- * @author Leemoonsoo
- *
- */
-public class ZeppelinContext extends HashMap<String, Object> {
-  private DependencyResolver dep;
-  private PrintStream out;
-  private InterpreterContext interpreterContext;
-
-  public ZeppelinContext(SparkContext sc, SQLContext sql,
-      InterpreterContext interpreterContext,
-      DependencyResolver dep, PrintStream printStream) {
-    this.sc = sc;
-    this.sqlContext = sql;
-    this.interpreterContext = interpreterContext;
-    this.dep = dep;
-    this.out = printStream;
-  }
-
-  public SparkContext sc;
-  public SQLContext sqlContext;
-  public HiveContext hiveContext;
-  private GUI gui;
-
-  /* spark-1.3
-  public SchemaRDD sql(String sql) {
-    return sqlContext.sql(sql);
-  }
-  */
-
-  /**
-   * Load a dependency for the interpreter and the runtime (driver),
-   * and distribute it to the spark cluster (sc.addJar()).
-   *
-   * @param artifact "group:artifact:version" or file path like "/somepath/your.jar"
-   * @return
-   * @throws Exception
-   */
-  public Iterable<String> load(String artifact) throws Exception {
-    return collectionAsScalaIterable(dep.load(artifact, true));
-  }
-
-  /**
-   * Load a dependency and its transitive dependencies for the interpreter and the runtime (driver),
-   * and distribute them to the spark cluster (sc.addJar()).
-   *
-   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
-   * @param excludes exclusion list of transitive dependency. list of "groupId:artifactId" string.
-   * @return
-   * @throws Exception
-   */
-  public Iterable<String> load(String artifact, scala.collection.Iterable<String> excludes)
-      throws Exception {
-    return collectionAsScalaIterable(
-        dep.load(artifact,
-        asJavaCollection(excludes),
-        true));
-  }
-
-  /**
-   * Load a dependency and its transitive dependencies for the interpreter and the runtime (driver),
-   * and distribute them to the spark cluster (sc.addJar()).
-   *
-   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
-   * @param excludes exclusion list of transitive dependency. list of "groupId:artifactId" string.
-   * @return
-   * @throws Exception
-   */
-  public Iterable<String> load(String artifact, Collection<String> excludes) throws Exception {
-    return collectionAsScalaIterable(dep.load(artifact, excludes, true));
-  }
-
-  /**
-   * Load a dependency for the interpreter and the runtime (driver) only;
-   * it is not distributed to the spark cluster (no sc.addJar()).
-   *
-   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
-   * @return
-   * @throws Exception
-   */
-  public Iterable<String> loadLocal(String artifact) throws Exception {
-    return collectionAsScalaIterable(dep.load(artifact, false));
-  }
-
-
-  /**
-   * Load a dependency and its transitive dependencies for the interpreter and the runtime (driver) only;
-   * they are not distributed to the spark cluster (no sc.addJar()).
-   *
-   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
-   * @param excludes exclusion list of transitive dependency. list of "groupId:artifactId" string.
-   * @return
-   * @throws Exception
-   */
-  public Iterable<String> loadLocal(String artifact,
-      scala.collection.Iterable<String> excludes) throws Exception {
-    return collectionAsScalaIterable(dep.load(artifact,
-        asJavaCollection(excludes), false));
-  }
-
-  /**
-   * Load a dependency and its transitive dependencies for the interpreter and the runtime (driver) only;
-   * they are not distributed to the spark cluster (no sc.addJar()).
-   *
-   * @param artifact "groupId:artifactId:version" or file path like "/somepath/your.jar"
-   * @param excludes exclusion list of transitive dependency. list of "groupId:artifactId" string.
-   * @return
-   * @throws Exception
-   */
-  public Iterable<String> loadLocal(String artifact, Collection<String> excludes)
-      throws Exception {
-    return collectionAsScalaIterable(dep.load(artifact, excludes, false));
-  }
-
-
-  /**
-   * Add maven repository
-   *
-   * @param id id of the repository, e.g. oss, local, snapshot
-   * @param url url of the repository. supported protocols: file, http, https
-   */
-  public void addRepo(String id, String url) {
-    addRepo(id, url, false);
-  }
-
-  /**
-   * Add maven repository
-   *
-   * @param id id of repository
-   * @param url url of the repository. supported protocols: file, http, https
-   * @param snapshot true if it is a snapshot repository
-   */
-  public void addRepo(String id, String url, boolean snapshot) {
-    dep.addRepo(id, url, snapshot);
-  }
-
-  /**
-   * Remove maven repository by id
-   * @param id id of repository
-   */
-  public void removeRepo(String id){
-    dep.delRepo(id);
-  }
-
-  /**
-   * Get the value of the dynamic form input with the given name, using an empty string as the default.
-   *
-   * @param name
-   * @return
-   */
-
-  public Object input(String name) {
-    return input(name, "");
-  }
-
-  public Object input(String name, Object defaultValue) {
-    return gui.input(name, defaultValue);
-  }
-
-  public Object select(String name, scala.collection.Iterable<Tuple2<Object, String>> options) {
-    return select(name, "", options);
-  }
-
-  public Object select(String name, Object defaultValue,
-      scala.collection.Iterable<Tuple2<Object, String>> options) {
-    int n = options.size();
-    ParamOption[] paramOptions = new ParamOption[n];
-    Iterator<Tuple2<Object, String>> it = asJavaIterable(options).iterator();
-
-    int i = 0;
-    while (it.hasNext()) {
-      Tuple2<Object, String> valueAndDisplayValue = it.next();
-      paramOptions[i++] = new ParamOption(valueAndDisplayValue._1(), valueAndDisplayValue._2());
-    }
-
-    return gui.select(name, "", paramOptions);
-  }
-
-  public void setGui(GUI o) {
-    this.gui = o;
-  }
-
-  public void run(String lines) {
-    /*
-    String intpName = Paragraph.getRequiredReplName(lines);
-    String scriptBody = Paragraph.getScriptBody(lines);
-    Interpreter intp = interpreterContext.getParagraph().getRepl(intpName);
-    InterpreterResult ret = intp.interpret(scriptBody, interpreterContext);
-    if (ret.code() == InterpreterResult.Code.SUCCESS) {
-      out.println("%" + ret.type().toString().toLowerCase() + " " + ret.message());
-    } else if (ret.code() == InterpreterResult.Code.ERROR) {
-      out.println("Error: " + ret.message());
-    } else if (ret.code() == InterpreterResult.Code.INCOMPLETE) {
-      out.println("Incomplete");
-    } else {
-      out.println("Unknown error");
-    }
-    */
-    throw new RuntimeException("Missing implementation");
-  }
-
-  private void restartInterpreter() {
-  }
-
-  public InterpreterContext getInterpreterContext() {
-    return interpreterContext;
-  }
-
-  public void setInterpreterContext(InterpreterContext interpreterContext) {
-    this.interpreterContext = interpreterContext;
-  }
-
-}
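The methods above are what notebook users reach through the variable z that SparkInterpreter.open() binds into the REPL, typically from Scala. As a rough Java-side sketch of the dependency-loading calls (the repository URL and artifact coordinates are made up for illustration):

    import com.nflabs.zeppelin.spark.ZeppelinContext;

    public class ZeppelinContextUsageSketch {
      // "z" is the instance that SparkInterpreter.open() binds into the REPL;
      // user code never constructs it directly.
      public static void loadExtraDependencies(ZeppelinContext z) throws Exception {
        z.addRepo("myrepo", "http://repo.example.com/maven2/", false); // hypothetical repository
        z.load("org.example:some-artifact:1.0.0");   // resolved from maven, then sc.addJar()
        z.loadLocal("/somepath/your.jar");           // driver/interpreter classpath only
      }
    }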

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Booter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Booter.java b/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Booter.java
deleted file mode 100644
index 10c5bc2..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Booter.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-import java.io.File;
-
-import org.apache.maven.repository.internal.MavenRepositorySystemSession;
-import org.sonatype.aether.RepositorySystem;
-import org.sonatype.aether.RepositorySystemSession;
-import org.sonatype.aether.repository.LocalRepository;
-import org.sonatype.aether.repository.RemoteRepository;
-
-/**
- * Manage mvn repository.
- *
- * @author anthonycorbacho
- *
- */
-public class Booter {
-  public static RepositorySystem newRepositorySystem() {
-    return RepositorySystemFactory.newRepositorySystem();
-  }
-
-  public static RepositorySystemSession newRepositorySystemSession(
-      RepositorySystem system, String localRepoPath) {
-    MavenRepositorySystemSession session = new MavenRepositorySystemSession();
-
-    // find homedir
-    String home = System.getenv("ZEPPELIN_HOME");
-    if (home == null) {
-      home = System.getProperty("zeppelin.home");
-    }
-    if (home == null) {
-      home = "..";
-    }
-
-    String path = home + "/" + localRepoPath;
-
-    LocalRepository localRepo =
-        new LocalRepository(new File(path).getAbsolutePath());
-    session.setLocalRepositoryManager(system.newLocalRepositoryManager(localRepo));
-
-    // session.setTransferListener(new ConsoleTransferListener());
-    // session.setRepositoryListener(new ConsoleRepositoryListener());
-
-    // uncomment to generate dirty trees
-    // session.setDependencyGraphTransformer( null );
-
-    return session;
-  }
-
-  public static RemoteRepository newCentralRepository() {
-    return new RemoteRepository("central", "default", "http://repo1.maven.org/maven2/");
-  }
-}
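Booter above is a small factory around Sonatype Aether. A hedged usage sketch, assuming "local-repo" as the relative local-repository path (in practice the value comes from zeppelin.dep.localrepo) and using standard Aether accessors for the printouts:

    import org.sonatype.aether.RepositorySystem;
    import org.sonatype.aether.RepositorySystemSession;
    import org.sonatype.aether.repository.RemoteRepository;

    import com.nflabs.zeppelin.spark.dep.Booter;

    public class BooterUsageSketch {
      public static void main(String[] args) {
        RepositorySystem system = Booter.newRepositorySystem();
        // "local-repo" is resolved relative to ZEPPELIN_HOME (or zeppelin.home, or "..")
        RepositorySystemSession session = Booter.newRepositorySystemSession(system, "local-repo");
        RemoteRepository central = Booter.newCentralRepository();
        System.out.println("central: " + central.getUrl());
        System.out.println("local:   " + session.getLocalRepository().getBasedir());
      }
    }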

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Dependency.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Dependency.java b/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Dependency.java
deleted file mode 100644
index f8f6494..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Dependency.java
+++ /dev/null
@@ -1,73 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * A single dependency (maven coordinates or a local file path) with optional exclusions.
- */
-public class Dependency {
-  private String groupArtifactVersion;
-  private boolean local = false;
-  private List<String> exclusions;
-
-
-  public Dependency(String groupArtifactVersion) {
-    this.groupArtifactVersion = groupArtifactVersion;
-    exclusions = new LinkedList<String>();
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (!(o instanceof Dependency)) {
-      return false;
-    } else {
-      return ((Dependency) o).groupArtifactVersion.equals(groupArtifactVersion);
-    }
-  }
-
-  /**
-   * Don't add artifact into SparkContext (sc.addJar())
-   * @return
-   */
-  public Dependency local() {
-    local = true;
-    return this;
-  }
-
-  public Dependency excludeAll() {
-    exclude("*");
-    return this;
-  }
-
-  /**
-   *
-   * @param exclusions comma- or newline-separated list of "groupId:artifactId"
-   * @return
-   */
-  public Dependency exclude(String exclusions) {
-    for (String item : exclusions.split(",|\n")) {
-      this.exclusions.add(item);
-    }
-
-    return this;
-  }
-
-
-  public String getGroupArtifactVersion() {
-    return groupArtifactVersion;
-  }
-
-  public boolean isDist() {
-    return !local;
-  }
-
-  public List<String> getExclusions() {
-    return exclusions;
-  }
-
-  public boolean isLocalFsArtifact() {
-    int numSplits = groupArtifactVersion.split(":").length;
-    return !(numSplits >= 3 && numSplits <= 6);
-  }
-}
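A short usage sketch of the builder-style API above; exclude() and local() return this, so calls chain (the coordinates are made up):

    import com.nflabs.zeppelin.spark.dep.Dependency;

    public class DependencySketch {
      public static void main(String[] args) {
        // Maven coordinates, one excluded group:artifact, kept off the cluster classpath via local().
        Dependency d = new Dependency("org.example:some-lib:1.0.0")
            .exclude("org.slf4j:slf4j-api")
            .local();
        System.out.println(d.getGroupArtifactVersion() + " dist=" + d.isDist()); // dist=false

        // A plain file path is detected as a local filesystem artifact.
        System.out.println(new Dependency("/somepath/your.jar").isLocalFsArtifact()); // true
      }
    }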

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/dep/DependencyContext.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/DependencyContext.java b/spark/src/main/java/com/nflabs/zeppelin/spark/dep/DependencyContext.java
deleted file mode 100644
index 58268eb..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/DependencyContext.java
+++ /dev/null
@@ -1,134 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.LinkedList;
-import java.util.List;
-
-import org.sonatype.aether.RepositorySystem;
-import org.sonatype.aether.RepositorySystemSession;
-import org.sonatype.aether.artifact.Artifact;
-import org.sonatype.aether.collection.CollectRequest;
-import org.sonatype.aether.graph.DependencyFilter;
-import org.sonatype.aether.repository.RemoteRepository;
-import org.sonatype.aether.resolution.ArtifactResolutionException;
-import org.sonatype.aether.resolution.ArtifactResult;
-import org.sonatype.aether.resolution.DependencyRequest;
-import org.sonatype.aether.resolution.DependencyResolutionException;
-import org.sonatype.aether.util.artifact.DefaultArtifact;
-import org.sonatype.aether.util.artifact.JavaScopes;
-import org.sonatype.aether.util.filter.DependencyFilterUtils;
-import org.sonatype.aether.util.filter.PatternExclusionsDependencyFilter;
-
-
-/**
- * Collects dependencies and repositories, and resolves them to local jar files.
- */
-public class DependencyContext {
-  List<Dependency> dependencies = new LinkedList<Dependency>();
-  List<Repository> repositories = new LinkedList<Repository>();
-
-  List<File> files = new LinkedList<File>();
-  List<File> filesDist = new LinkedList<File>();
-  private RepositorySystem system = Booter.newRepositorySystem();
-  private RepositorySystemSession session;
-  private RemoteRepository mavenCentral = new RemoteRepository("central",
-      "default", "http://repo1.maven.org/maven2/");
-  private RemoteRepository mavenLocal = new RemoteRepository("local",
-      "default", "file://" + System.getProperty("user.home") + "/.m2/repository");
-
-  public DependencyContext(String localRepoPath) {
-    session =  Booter.newRepositorySystemSession(system, localRepoPath);
-  }
-
-  public Dependency load(String lib) {
-    Dependency dep = new Dependency(lib);
-
-    if (dependencies.contains(dep)) {
-      dependencies.remove(dep);
-    }
-    dependencies.add(dep);
-    return dep;
-  }
-
-  public Repository addRepo(String name) {
-    Repository rep = new Repository(name);
-    repositories.add(rep);
-    return rep;
-  }
-
-  public void reset() {
-    dependencies = new LinkedList<Dependency>();
-    repositories = new LinkedList<Repository>();
-
-    files = new LinkedList<File>();
-    filesDist = new LinkedList<File>();
-  }
-
-
-  /**
-   * fetch all artifacts
-   * @return
-   * @throws MalformedURLException
-   * @throws ArtifactResolutionException
-   * @throws DependencyResolutionException
-   */
-  public List<File> fetch() throws MalformedURLException,
-      DependencyResolutionException, ArtifactResolutionException {
-
-    for (Dependency dep : dependencies) {
-      if (!dep.isLocalFsArtifact()) {
-        List<ArtifactResult> artifacts = fetchArtifactWithDep(dep);
-        for (ArtifactResult artifact : artifacts) {
-          if (dep.isDist()) {
-            filesDist.add(artifact.getArtifact().getFile());
-          }
-          files.add(artifact.getArtifact().getFile());
-        }
-      } else {
-        if (dep.isDist()) {
-          filesDist.add(new File(dep.getGroupArtifactVersion()));
-        }
-        files.add(new File(dep.getGroupArtifactVersion()));
-      }
-    }
-
-    return files;
-  }
-
-  private List<ArtifactResult> fetchArtifactWithDep(Dependency dep)
-      throws DependencyResolutionException, ArtifactResolutionException {
-    Artifact artifact = new DefaultArtifact(
-        DependencyResolver.inferScalaVersion(dep.getGroupArtifactVersion()));
-
-    DependencyFilter classpathFlter = DependencyFilterUtils
-        .classpathFilter(JavaScopes.COMPILE);
-    PatternExclusionsDependencyFilter exclusionFilter = new PatternExclusionsDependencyFilter(
-        DependencyResolver.inferScalaVersion(dep.getExclusions()));
-
-    CollectRequest collectRequest = new CollectRequest();
-    collectRequest.setRoot(new org.sonatype.aether.graph.Dependency(artifact,
-        JavaScopes.COMPILE));
-
-    collectRequest.addRepository(mavenCentral);
-    collectRequest.addRepository(mavenLocal);
-    for (Repository repo : repositories) {
-      RemoteRepository rr = new RemoteRepository(repo.getName(), "default", repo.getUrl());
-      rr.setPolicy(repo.isSnapshot(), null);
-      collectRequest.addRepository(rr);
-    }
-
-    DependencyRequest dependencyRequest = new DependencyRequest(collectRequest,
-        DependencyFilterUtils.andFilter(exclusionFilter, classpathFlter));
-
-    return system.resolveDependencies(session, dependencyRequest).getArtifactResults();
-  }
-
-  public List<File> getFiles() {
-    return files;
-  }
-
-  public List<File> getFilesDist() {
-    return filesDist;
-  }
-}
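A hedged sketch of how calling code might drive DependencyContext above end to end; the repository URL and artifact coordinates are hypothetical:

    import java.io.File;
    import java.util.List;

    import com.nflabs.zeppelin.spark.dep.DependencyContext;

    public class DependencyContextSketch {
      public static void main(String[] args) throws Exception {
        DependencyContext depc = new DependencyContext("local-repo");             // assumed repo path
        depc.addRepo("myrepo").url("http://repo.example.com/maven2/").snapshot(); // hypothetical repo
        depc.load("org.example:some-lib:1.0.0").excludeAll();                     // hypothetical artifact
        List<File> jars = depc.fetch(); // maven central, ~/.m2 and the added repo are consulted
        for (File f : jars) {
          System.out.println(f.getAbsolutePath());
        }
      }
    }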

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/dep/DependencyResolver.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/DependencyResolver.java b/spark/src/main/java/com/nflabs/zeppelin/spark/dep/DependencyResolver.java
deleted file mode 100644
index 4800e1a..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/DependencyResolver.java
+++ /dev/null
@@ -1,333 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-import java.io.File;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.spark.SparkContext;
-import org.apache.spark.repl.SparkIMain;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.sonatype.aether.RepositorySystem;
-import org.sonatype.aether.RepositorySystemSession;
-import org.sonatype.aether.artifact.Artifact;
-import org.sonatype.aether.collection.CollectRequest;
-import org.sonatype.aether.graph.Dependency;
-import org.sonatype.aether.graph.DependencyFilter;
-import org.sonatype.aether.repository.RemoteRepository;
-import org.sonatype.aether.resolution.ArtifactResult;
-import org.sonatype.aether.resolution.DependencyRequest;
-import org.sonatype.aether.util.artifact.DefaultArtifact;
-import org.sonatype.aether.util.artifact.JavaScopes;
-import org.sonatype.aether.util.filter.DependencyFilterUtils;
-import org.sonatype.aether.util.filter.PatternExclusionsDependencyFilter;
-
-import scala.Some;
-import scala.collection.IndexedSeq;
-import scala.reflect.io.AbstractFile;
-import scala.tools.nsc.Global;
-import scala.tools.nsc.backend.JavaPlatform;
-import scala.tools.nsc.util.ClassPath;
-import scala.tools.nsc.util.MergedClassPath;
-
-/**
- * Deps resolver.
- * Add new dependencies from a mvn repo (at runtime) to Zeppelin.
- *
- * @author anthonycorbacho
- *
- */
-public class DependencyResolver {
-  Logger logger = LoggerFactory.getLogger(DependencyResolver.class);
-  private Global global;
-  private SparkIMain intp;
-  private SparkContext sc;
-  private RepositorySystem system = Booter.newRepositorySystem();
-  private List<RemoteRepository> repos = new LinkedList<RemoteRepository>();
-  private RepositorySystemSession session;
-  private DependencyFilter classpathFlter = DependencyFilterUtils.classpathFilter(
-                                                                                JavaScopes.COMPILE,
-                                                                                JavaScopes.PROVIDED,
-                                                                                JavaScopes.RUNTIME,
-                                                                                JavaScopes.SYSTEM);
-
-  private final String[] exclusions = new String[] {"org.scala-lang:scala-library",
-                                                    "org.scala-lang:scala-compiler",
-                                                    "org.scala-lang:scala-reflect",
-                                                    "org.scala-lang:scalap",
-                                                    "com.nflabs.zeppelin:zeppelin-zengine",
-                                                    "com.nflabs.zeppelin:zeppelin-spark",
-                                                    "com.nflabs.zeppelin:zeppelin-server"};
-
-  public DependencyResolver(SparkIMain intp, SparkContext sc, String localRepoPath) {
-    this.intp = intp;
-    this.global = intp.global();
-    this.sc = sc;
-    session = Booter.newRepositorySystemSession(system, localRepoPath);
-    repos.add(Booter.newCentralRepository()); // add maven central
-    repos.add(new RemoteRepository("local", "default", "file://"
-        + System.getProperty("user.home") + "/.m2/repository"));
-  }
-
-  public void addRepo(String id, String url, boolean snapshot) {
-    synchronized (repos) {
-      delRepo(id);
-      RemoteRepository rr = new RemoteRepository(id, "default", url);
-      rr.setPolicy(snapshot, null);
-      repos.add(rr);
-    }
-  }
-
-  public RemoteRepository delRepo(String id) {
-    synchronized (repos) {
-      Iterator<RemoteRepository> it = repos.iterator();
-      // check every registered repository, not just the first one
-      while (it.hasNext()) {
-        RemoteRepository repo = it.next();
-        if (repo.getId().equals(id)) {
-          it.remove();
-          return repo;
-        }
-      }
-    }
-    return null;
-  }
-
-  private void updateCompilerClassPath(URL[] urls) throws IllegalAccessException,
-      IllegalArgumentException, InvocationTargetException {
-
-    JavaPlatform platform = (JavaPlatform) global.platform();
-    MergedClassPath<AbstractFile> newClassPath = mergeUrlsIntoClassPath(platform, urls);
-
-    Method[] methods = platform.getClass().getMethods();
-    for (Method m : methods) {
-      if (m.getName().endsWith("currentClassPath_$eq")) {
-        m.invoke(platform, new Some(newClassPath));
-        break;
-      }
-    }
-
-    // NOTE: Must use reflection until this is exposed/fixed upstream in Scala
-    List<String> classPaths = new LinkedList<String>();
-    for (URL url : urls) {
-      classPaths.add(url.getPath());
-    }
-
-    // Reload all jars specified into our compiler
-    global.invalidateClassPathEntries(scala.collection.JavaConversions.asScalaBuffer(classPaths)
-        .toList());
-  }
-
-  // Until spark 1.1.x
-  // check https://github.com/apache/spark/commit/191d7cf2a655d032f160b9fa181730364681d0e7
-  private void updateRuntimeClassPath(URL[] urls) throws SecurityException, IllegalAccessException,
-      IllegalArgumentException, InvocationTargetException, NoSuchMethodException {
-    ClassLoader cl = intp.classLoader().getParent();
-    Method addURL;
-    addURL = cl.getClass().getDeclaredMethod("addURL", new Class[] {URL.class});
-    addURL.setAccessible(true);
-    for (URL url : urls) {
-      addURL.invoke(cl, url);
-    }
-  }
-
-  private MergedClassPath<AbstractFile> mergeUrlsIntoClassPath(JavaPlatform platform, URL[] urls) {
-    IndexedSeq<ClassPath<AbstractFile>> entries =
-        ((MergedClassPath<AbstractFile>) platform.classPath()).entries();
-    List<ClassPath<AbstractFile>> cp = new LinkedList<ClassPath<AbstractFile>>();
-
-    for (int i = 0; i < entries.size(); i++) {
-      cp.add(entries.apply(i));
-    }
-
-    for (URL url : urls) {
-      AbstractFile file;
-      if ("file".equals(url.getProtocol())) {
-        File f = new File(url.getPath());
-        if (f.isDirectory()) {
-          file = AbstractFile.getDirectory(scala.reflect.io.File.jfile2path(f));
-        } else {
-          file = AbstractFile.getFile(scala.reflect.io.File.jfile2path(f));
-        }
-      } else {
-        file = AbstractFile.getURL(url);
-      }
-
-      ClassPath<AbstractFile> newcp = platform.classPath().context().newClassPath(file);
-
-      // keep entries distinct: only add the new classpath entry if it is not already present
-      if (!cp.contains(newcp)) {
-        cp.add(newcp);
-      }
-    }
-
-    return new MergedClassPath(scala.collection.JavaConversions.asScalaBuffer(cp).toIndexedSeq(),
-        platform.classPath().context());
-  }
-
-  public List<String> load(String artifact,
-      boolean addSparkContext) throws Exception {
-    return load(artifact, new LinkedList<String>(), addSparkContext);
-  }
-
-  public List<String> load(String artifact, Collection<String> excludes,
-      boolean addSparkContext) throws Exception {
-    if (StringUtils.isBlank(artifact)) {
-      // blank artifact coordinates are invalid
-      throw new RuntimeException("Invalid artifact to load");
-    }
-
-    // <groupId>:<artifactId>[:<extension>[:<classifier>]]:<version>
-    int numSplits = artifact.split(":").length;
-    if (numSplits >= 3 && numSplits <= 6) {
-      return loadFromMvn(artifact, excludes, addSparkContext);
-    } else {
-      loadFromFs(artifact, addSparkContext);
-      LinkedList<String> libs = new LinkedList<String>();
-      libs.add(artifact);
-      return libs;
-    }
-  }
-
-  private void loadFromFs(String artifact, boolean addSparkContext) throws Exception {
-    File jarFile = new File(artifact);
-
-    intp.global().new Run();
-
-    updateRuntimeClassPath(new URL[] {jarFile.toURI().toURL()});
-    updateCompilerClassPath(new URL[] {jarFile.toURI().toURL()});
-
-    if (addSparkContext) {
-      sc.addJar(jarFile.getAbsolutePath());
-    }
-  }
-
-  private List<String> loadFromMvn(String artifact, Collection<String> excludes,
-      boolean addSparkContext) throws Exception {
-    List<String> loadedLibs = new LinkedList<String>();
-    Collection<String> allExclusions = new LinkedList<String>();
-    allExclusions.addAll(excludes);
-    allExclusions.addAll(Arrays.asList(exclusions));
-
-    List<ArtifactResult> listOfArtifact;
-    listOfArtifact = getArtifactsWithDep(artifact, allExclusions);
-
-    Iterator<ArtifactResult> it = listOfArtifact.iterator();
-    while (it.hasNext()) {
-      Artifact a = it.next().getArtifact();
-      String gav = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion();
-      for (String exclude : allExclusions) {
-        if (gav.startsWith(exclude)) {
-          it.remove();
-          break;
-        }
-      }
-    }
-
-    List<URL> newClassPathList = new LinkedList<URL>();
-    List<File> files = new LinkedList<File>();
-    for (ArtifactResult artifactResult : listOfArtifact) {
-      logger.info("Load " + artifactResult.getArtifact().getGroupId() + ":"
-          + artifactResult.getArtifact().getArtifactId() + ":"
-          + artifactResult.getArtifact().getVersion());
-      newClassPathList.add(artifactResult.getArtifact().getFile().toURI().toURL());
-      files.add(artifactResult.getArtifact().getFile());
-      loadedLibs.add(artifactResult.getArtifact().getGroupId() + ":"
-          + artifactResult.getArtifact().getArtifactId() + ":"
-          + artifactResult.getArtifact().getVersion());
-    }
-
-    intp.global().new Run();
-    updateRuntimeClassPath(newClassPathList.toArray(new URL[0]));
-    updateCompilerClassPath(newClassPathList.toArray(new URL[0]));
-
-    if (addSparkContext) {
-      for (File f : files) {
-        sc.addJar(f.getAbsolutePath());
-      }
-    }
-
-    return loadedLibs;
-  }
-
-  /**
-   *
-   * @param dependency
-   * @param excludes list of exclusion patterns of the form "groupId:artifactId"
-   * @return
-   * @throws Exception
-   */
-  public List<ArtifactResult> getArtifactsWithDep(String dependency,
-      Collection<String> excludes) throws Exception {
-    Artifact artifact = new DefaultArtifact(inferScalaVersion(dependency));
-    DependencyFilter classpathFlter = DependencyFilterUtils.classpathFilter(JavaScopes.COMPILE);
-    PatternExclusionsDependencyFilter exclusionFilter =
-        new PatternExclusionsDependencyFilter(inferScalaVersion(excludes));
-
-    CollectRequest collectRequest = new CollectRequest();
-    collectRequest.setRoot(new Dependency(artifact, JavaScopes.COMPILE));
-
-    synchronized (repos) {
-      for (RemoteRepository repo : repos) {
-        collectRequest.addRepository(repo);
-      }
-    }
-    DependencyRequest dependencyRequest = new DependencyRequest(collectRequest,
-        DependencyFilterUtils.andFilter(exclusionFilter, classpathFilter));
-    return system.resolveDependencies(session, dependencyRequest).getArtifactResults();
-  }
-
-  public static Collection<String> inferScalaVersion(Collection<String> artifact) {
-    List<String> list = new LinkedList<String>();
-    for (String a : artifact) {
-      list.add(inferScalaVersion(a));
-    }
-    return list;
-  }
-
-  public static String inferScalaVersion(String artifact) {
-    int pos = artifact.indexOf(":");
-    if (pos < 0 || pos + 2 >= artifact.length()) {
-      // failed to infer
-      return artifact;
-    }
-
-    if (':' == artifact.charAt(pos + 1)) {
-      String restOfthem = "";
-      String versionSep = ":";
-
-      String groupId = artifact.substring(0, pos);
-      int nextPos = artifact.indexOf(":", pos + 2);
-      if (nextPos < 0) {
-        if (artifact.charAt(artifact.length() - 1) == '*') {
-          nextPos = artifact.length() - 1;
-          versionSep = "";
-          restOfthem = "*";
-        } else {
-          versionSep = "";
-          nextPos = artifact.length();
-        }
-      }
-
-      String artifactId = artifact.substring(pos + 2, nextPos);
-      if (nextPos < artifact.length()) {
-        if (!restOfthem.equals("*")) {
-          restOfthem = artifact.substring(nextPos + 1);
-        }
-      }
-
-      String [] version = scala.util.Properties.versionNumberString().split("[.]");
-      String scalaVersion = version[0] + "." + version[1];
-
-      return groupId + ":" + artifactId + "_" + scalaVersion + versionSep + restOfthem;
-    } else {
-      return artifact;
-    }
-  }
-}
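
The resolver removed above rewrites double-colon coordinates (groupId::artifactId:version) by appending the running Scala binary version to the artifact id, while plain coordinates pass through unchanged. A minimal illustration of that behavior, assuming a Scala 2.10.x runtime with scala-library on the classpath; the enclosing class name is not shown in this hunk, so the version lookup is mirrored inline:

    // Mirrors inferScalaVersion(String) from the deleted resolver above.
    //   "org.apache.commons:commons-lang3:3.3.2"
    //     -> unchanged (single colon after the group id)
    //   "org.apache.spark::spark-streaming-twitter:1.2.0"
    //     -> "org.apache.spark:spark-streaming-twitter_2.10:1.2.0"
    public class InferScalaVersionExample {
      public static void main(String[] args) {
        String[] v = scala.util.Properties.versionNumberString().split("[.]");
        String scalaVersion = v[0] + "." + v[1];  // e.g. "2.10"
        System.out.println("org.apache.spark:spark-streaming-twitter_" + scalaVersion + ":1.2.0");
      }
    }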

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Repository.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Repository.java b/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Repository.java
deleted file mode 100644
index 8ca5fe7..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/Repository.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-/**
- * Remote repository definition (name, URL, snapshot flag) used by the
- * dependency loader.
- */
-public class Repository {
-  private boolean snapshot = false;
-  private String name;
-  private String url;
-
-  public Repository(String name){
-    this.name = name;
-  }
-
-  public Repository url(String url) {
-    this.url = url;
-    return this;
-  }
-
-  public Repository snapshot() {
-    snapshot = true;
-    return this;
-  }
-
-  public boolean isSnapshot() {
-    return snapshot;
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public String getUrl() {
-    return url;
-  }
-}
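
Repository (deleted above) is a small fluent builder for repository definitions; a usage sketch with illustrative names and URLs:

    // Illustrative only: repository names and URLs are examples.
    Repository central = new Repository("central")
        .url("http://repo1.maven.org/maven2/");
    Repository snapshots = new Repository("apache-snapshots")
        .url("https://repository.apache.org/snapshots/")
        .snapshot();  // marks the repository as a snapshot repository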

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/dep/RepositoryListener.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/RepositoryListener.java b/spark/src/main/java/com/nflabs/zeppelin/spark/dep/RepositoryListener.java
deleted file mode 100644
index 0fed51d..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/RepositoryListener.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.sonatype.aether.AbstractRepositoryListener;
-import org.sonatype.aether.RepositoryEvent;
-
-/**
- * Simple repository listener that logs repository events.
- * 
- * @author anthonycorbacho
- *
- */
-public class RepositoryListener extends AbstractRepositoryListener {
-  Logger logger = LoggerFactory.getLogger(RepositoryListener.class);
-
-  public RepositoryListener() {}
-
-  public void artifactDeployed(RepositoryEvent event) {
-    logger.info("Deployed " + event.getArtifact() + " to " + event.getRepository());
-  }
-
-  public void artifactDeploying(RepositoryEvent event) {
-    logger.info("Deploying " + event.getArtifact() + " to " + event.getRepository());
-  }
-
-  public void artifactDescriptorInvalid(RepositoryEvent event) {
-    logger.info("Invalid artifact descriptor for " + event.getArtifact() + ": "
-                                                   + event.getException().getMessage());
-  }
-
-  public void artifactDescriptorMissing(RepositoryEvent event) {
-    logger.info("Missing artifact descriptor for " + event.getArtifact());
-  }
-
-  public void artifactInstalled(RepositoryEvent event) {
-    logger.info("Installed " + event.getArtifact() + " to " + event.getFile());
-  }
-
-  public void artifactInstalling(RepositoryEvent event) {
-    logger.info("Installing " + event.getArtifact() + " to " + event.getFile());
-  }
-
-  public void artifactResolved(RepositoryEvent event) {
-    logger.info("Resolved artifact " + event.getArtifact() + " from " + event.getRepository());
-  }
-
-  public void artifactDownloading(RepositoryEvent event) {
-    logger.info("Downloading artifact " + event.getArtifact() + " from " + event.getRepository());
-  }
-
-  public void artifactDownloaded(RepositoryEvent event) {
-    logger.info("Downloaded artifact " + event.getArtifact() + " from " + event.getRepository());
-  }
-
-  public void artifactResolving(RepositoryEvent event) {
-    logger.info("Resolving artifact " + event.getArtifact());
-  }
-
-  public void metadataDeployed(RepositoryEvent event) {
-    logger.info("Deployed " + event.getMetadata() + " to " + event.getRepository());
-  }
-
-  public void metadataDeploying(RepositoryEvent event) {
-    logger.info("Deploying " + event.getMetadata() + " to " + event.getRepository());
-  }
-
-  public void metadataInstalled(RepositoryEvent event) {
-    logger.info("Installed " + event.getMetadata() + " to " + event.getFile());
-  }
-
-  public void metadataInstalling(RepositoryEvent event) {
-    logger.info("Installing " + event.getMetadata() + " to " + event.getFile());
-  }
-
-  public void metadataInvalid(RepositoryEvent event) {
-    logger.info("Invalid metadata " + event.getMetadata());
-  }
-
-  public void metadataResolved(RepositoryEvent event) {
-    logger.info("Resolved metadata " + event.getMetadata() + " from " + event.getRepository());
-  }
-
-  public void metadataResolving(RepositoryEvent event) {
-    logger.info("Resolving metadata " + event.getMetadata() + " from " + event.getRepository());
-  }
-}
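
RepositoryListener (deleted above) only logs repository events; it has to be attached to an Aether session to see any. A sketch of that wiring, assuming org.apache.maven.repository.internal.MavenRepositorySystemSession is available (the same package RepositorySystemFactory below already imports) and using the TransferListener defined in the next file:

    // Sketch: attach the logging listeners to an Aether session.
    MavenRepositorySystemSession session = new MavenRepositorySystemSession();
    session.setRepositoryListener(new RepositoryListener());
    session.setTransferListener(new TransferListener());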

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/dep/RepositorySystemFactory.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/RepositorySystemFactory.java b/spark/src/main/java/com/nflabs/zeppelin/spark/dep/RepositorySystemFactory.java
deleted file mode 100644
index cf48a33..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/RepositorySystemFactory.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-import org.apache.maven.repository.internal.DefaultServiceLocator;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.providers.http.HttpWagon;
-import org.apache.maven.wagon.providers.http.LightweightHttpWagon;
-import org.sonatype.aether.RepositorySystem;
-import org.sonatype.aether.connector.file.FileRepositoryConnectorFactory;
-import org.sonatype.aether.connector.wagon.WagonProvider;
-import org.sonatype.aether.connector.wagon.WagonRepositoryConnectorFactory;
-import org.sonatype.aether.spi.connector.RepositoryConnectorFactory;
-
-/**
- * Creates RepositorySystem instances for dependency resolution.
- *
- * @author anthonycorbacho
- *
- */
-public class RepositorySystemFactory {
-  public static RepositorySystem newRepositorySystem() {
-    DefaultServiceLocator locator = new DefaultServiceLocator();
-    locator.addService(RepositoryConnectorFactory.class, FileRepositoryConnectorFactory.class);
-    locator.addService(RepositoryConnectorFactory.class, WagonRepositoryConnectorFactory.class);
-    locator.setServices(WagonProvider.class, new ManualWagonProvider());
-
-    return locator.getService(RepositorySystem.class);
-  }
-
-  /**
-   * ManualWagonProvider
-   */
-  public static class ManualWagonProvider implements WagonProvider {
-
-    @Override
-    public Wagon lookup(String roleHint) throws Exception {
-      if ("http".equals(roleHint)) {
-        return new LightweightHttpWagon();
-      }
-
-      if ("https".equals(roleHint)) {
-        return new HttpWagon();
-      }
-
-      return null;
-    }
-
-    @Override
-    public void release(Wagon arg0) {
-
-    }
-  }
-}
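
RepositorySystemFactory (deleted above) is the entry point for obtaining the Aether RepositorySystem that the resolver uses. A minimal sketch; the remote repository id and URL are illustrative:

    // Obtain the resolution engine and declare a remote repository to resolve from.
    RepositorySystem system = RepositorySystemFactory.newRepositorySystem();
    RemoteRepository central =
        new RemoteRepository("central", "default", "http://repo1.maven.org/maven2/");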

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/com/nflabs/zeppelin/spark/dep/TransferListener.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/TransferListener.java b/spark/src/main/java/com/nflabs/zeppelin/spark/dep/TransferListener.java
deleted file mode 100644
index faecf54..0000000
--- a/spark/src/main/java/com/nflabs/zeppelin/spark/dep/TransferListener.java
+++ /dev/null
@@ -1,130 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-import java.io.PrintStream;
-import java.text.DecimalFormat;
-import java.text.DecimalFormatSymbols;
-import java.util.Locale;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.sonatype.aether.transfer.AbstractTransferListener;
-import org.sonatype.aether.transfer.TransferEvent;
-import org.sonatype.aether.transfer.TransferResource;
-
-/**
- * Simple transfer listener that logs dependency download progress.
- * 
- * @author anthonycorbacho
- *
- */
-public class TransferListener extends AbstractTransferListener {
-  Logger logger = LoggerFactory.getLogger(TransferListener.class);
-  private PrintStream out;
-
-  private Map<TransferResource, Long> downloads = new ConcurrentHashMap<TransferResource, Long>();
-
-  private int lastLength;
-
-  public TransferListener() {}
-
-  @Override
-  public void transferInitiated(TransferEvent event) {
-    String message =
-        event.getRequestType() == TransferEvent.RequestType.PUT ? "Uploading" : "Downloading";
-
-    logger.info(message + ": " + event.getResource().getRepositoryUrl()
-                + event.getResource().getResourceName());
-  }
-
-  @Override
-  public void transferProgressed(TransferEvent event) {
-    TransferResource resource = event.getResource();
-    downloads.put(resource, Long.valueOf(event.getTransferredBytes()));
-
-    StringBuilder buffer = new StringBuilder(64);
-
-    for (Map.Entry<TransferResource, Long> entry : downloads.entrySet()) {
-      long total = entry.getKey().getContentLength();
-      long complete = entry.getValue().longValue();
-
-      buffer.append(getStatus(complete, total)).append("  ");
-    }
-
-    int pad = lastLength - buffer.length();
-    lastLength = buffer.length();
-    pad(buffer, pad);
-    buffer.append('\r');
-
-    logger.info(buffer.toString());
-  }
-
-  private String getStatus(long complete, long total) {
-    if (total >= 1024) {
-      return toKB(complete) + "/" + toKB(total) + " KB ";
-    } else if (total >= 0) {
-      return complete + "/" + total + " B ";
-    } else if (complete >= 1024) {
-      return toKB(complete) + " KB ";
-    } else {
-      return complete + " B ";
-    }
-  }
-
-  private void pad(StringBuilder buffer, int spaces) {
-    String block = "                                        ";
-    while (spaces > 0) {
-      int n = Math.min(spaces, block.length());
-      buffer.append(block, 0, n);
-      spaces -= n;
-    }
-  }
-
-  @Override
-  public void transferSucceeded(TransferEvent event) {
-    transferCompleted(event);
-
-    TransferResource resource = event.getResource();
-    long contentLength = event.getTransferredBytes();
-    if (contentLength >= 0) {
-      String type =
-          (event.getRequestType() == TransferEvent.RequestType.PUT ? "Uploaded" : "Downloaded");
-      String len = contentLength >= 1024 ? toKB(contentLength) + " KB" : contentLength + " B";
-
-      String throughput = "";
-      long duration = System.currentTimeMillis() - resource.getTransferStartTime();
-      if (duration > 0) {
-        DecimalFormat format = new DecimalFormat("0.0", new DecimalFormatSymbols(Locale.ENGLISH));
-        double kbPerSec = (contentLength / 1024.0) / (duration / 1000.0);
-        throughput = " at " + format.format(kbPerSec) + " KB/sec";
-      }
-
-      logger.info(type + ": " + resource.getRepositoryUrl() + resource.getResourceName() + " ("
-          + len + throughput + ")");
-    }
-  }
-
-  @Override
-  public void transferFailed(TransferEvent event) {
-    transferCompleted(event);
-    event.getException().printStackTrace(out);
-  }
-
-  private void transferCompleted(TransferEvent event) {
-    downloads.remove(event.getResource());
-    StringBuilder buffer = new StringBuilder(64);
-    pad(buffer, lastLength);
-    buffer.append('\r');
-    logger.info(buffer.toString());
-  }
-
-  public void transferCorrupted(TransferEvent event) {
-    event.getException().printStackTrace(out);
-  }
-
-  protected long toKB(long bytes) {
-    return (bytes + 1023) / 1024;
-  }
-
-}
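
The toKB() helper above rounds up to the next kilobyte rather than truncating, so a partially transferred kilobyte still counts as one. Worked through:

    // toKB(bytes) == (bytes + 1023) / 1024, using integer division
    //   toKB(1)    -> (1 + 1023) / 1024    = 1
    //   toKB(1024) -> (1024 + 1023) / 1024 = 1
    //   toKB(1536) -> (1536 + 1023) / 1024 = 2
    long bytes = 1536;
    long kb = (bytes + 1023) / 1024;  // 2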

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/DepInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/DepInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/DepInterpreter.java
new file mode 100644
index 0000000..16bd0f0
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/DepInterpreter.java
@@ -0,0 +1,289 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.spark.repl.SparkILoop;
+import org.apache.spark.repl.SparkIMain;
+import org.apache.spark.repl.SparkJLineCompletion;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.interpreter.WrappedInterpreter;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.spark.dep.DependencyContext;
+import org.sonatype.aether.resolution.ArtifactResolutionException;
+import org.sonatype.aether.resolution.DependencyResolutionException;
+
+import scala.Console;
+import scala.None;
+import scala.Some;
+import scala.tools.nsc.Settings;
+import scala.tools.nsc.interpreter.Completion.Candidates;
+import scala.tools.nsc.interpreter.Completion.ScalaCompleter;
+import scala.tools.nsc.settings.MutableSettings.BooleanSetting;
+import scala.tools.nsc.settings.MutableSettings.PathSetting;
+
+
+/**
+ * DepInterpreter downloads dependencies and passes them to SparkInterpreter
+ * when SparkInterpreter is initialized. It does not create a SparkContext itself.
+ *
+ */
+public class DepInterpreter extends Interpreter {
+
+  static {
+    Interpreter.register(
+        "dep",
+        "spark",
+        DepInterpreter.class.getName(),
+        new InterpreterPropertyBuilder()
+            .add("zeppelin.dep.localrepo", "local-repo", "local repository for dependency loader")
+            .build());
+
+  }
+
+  private SparkIMain intp;
+  private ByteArrayOutputStream out;
+  private DependencyContext depc;
+  private SparkJLineCompletion completor;
+  private SparkILoop interpreter;
+
+  public DepInterpreter(Properties property) {
+    super(property);
+  }
+
+  public DependencyContext getDependencyContext() {
+    return depc;
+  }
+
+
+  @Override
+  public void close() {
+    if (intp != null) {
+      intp.close();
+    }
+  }
+
+  @Override
+  public void open() {
+    out = new ByteArrayOutputStream();
+    createIMain();
+  }
+
+
+  private void createIMain() {
+    Settings settings = new Settings();
+    URL[] urls = getClassloaderUrls();
+
+    // set classpath for scala compiler
+    PathSetting pathSettings = settings.classpath();
+    String classpath = "";
+    List<File> paths = currentClassPath();
+    for (File f : paths) {
+      if (classpath.length() > 0) {
+        classpath += File.pathSeparator;
+      }
+      classpath += f.getAbsolutePath();
+    }
+
+    if (urls != null) {
+      for (URL u : urls) {
+        if (classpath.length() > 0) {
+          classpath += File.pathSeparator;
+        }
+        classpath += u.getFile();
+      }
+    }
+
+    pathSettings.v_$eq(classpath);
+    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);
+
+    // set classloader for scala compiler
+    settings.explicitParentLoader_$eq(new Some<ClassLoader>(Thread.currentThread()
+        .getContextClassLoader()));
+
+    BooleanSetting b = (BooleanSetting) settings.usejavacp();
+    b.v_$eq(true);
+    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);
+
+    interpreter = new SparkILoop(null, new PrintWriter(out));
+    interpreter.settings_$eq(settings);
+
+    interpreter.createInterpreter();
+
+
+    intp = interpreter.intp();
+    intp.setContextClassLoader();
+    intp.initializeSynchronous();
+
+    depc = new DependencyContext(getProperty("zeppelin.dep.localrepo"));
+    completor = new SparkJLineCompletion(intp);
+
+    intp.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
+    Map<String, Object> binder = (Map<String, Object>) getValue("_binder");
+    binder.put("depc", depc);
+
+    intp.interpret("@transient val z = "
+        + "_binder.get(\"depc\").asInstanceOf[org.apache.zeppelin.spark.dep.DependencyContext]");
+
+  }
+
+  public Object getValue(String name) {
+    Object ret = intp.valueOfTerm(name);
+    if (ret instanceof None) {
+      return null;
+    } else if (ret instanceof Some) {
+      return ((Some) ret).get();
+    } else {
+      return ret;
+    }
+  }
+
+  @Override
+  public InterpreterResult interpret(String st, InterpreterContext context) {
+    PrintStream printStream = new PrintStream(out);
+    Console.setOut(printStream);
+    out.reset();
+
+    SparkInterpreter sparkInterpreter = getSparkInterpreter();
+
+    if (sparkInterpreter != null && sparkInterpreter.isSparkContextInitialized()) {
+      return new InterpreterResult(Code.ERROR,
+          "Must be used before SparkInterpreter (%spark) initialized");
+    }
+
+    scala.tools.nsc.interpreter.Results.Result ret = intp.interpret(st);
+    Code code = getResultCode(ret);
+
+    try {
+      depc.fetch();
+    } catch (MalformedURLException | DependencyResolutionException
+        | ArtifactResolutionException e) {
+      return new InterpreterResult(Code.ERROR, e.toString());
+    }
+
+    if (code == Code.INCOMPLETE) {
+      return new InterpreterResult(code, "Incomplete expression");
+    } else if (code == Code.ERROR) {
+      return new InterpreterResult(code, out.toString());
+    } else {
+      return new InterpreterResult(code, out.toString());
+    }
+  }
+
+  private Code getResultCode(scala.tools.nsc.interpreter.Results.Result r) {
+    if (r instanceof scala.tools.nsc.interpreter.Results.Success$) {
+      return Code.SUCCESS;
+    } else if (r instanceof scala.tools.nsc.interpreter.Results.Incomplete$) {
+      return Code.INCOMPLETE;
+    } else {
+      return Code.ERROR;
+    }
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {
+  }
+
+
+  @Override
+  public FormType getFormType() {
+    return FormType.NATIVE;
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    return 0;
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    ScalaCompleter c = completor.completer();
+    Candidates ret = c.complete(buf, cursor);
+    return scala.collection.JavaConversions.asJavaList(ret.candidates());
+  }
+
+  private List<File> currentClassPath() {
+    List<File> paths = classPath(Thread.currentThread().getContextClassLoader());
+    String[] cps = System.getProperty("java.class.path").split(File.pathSeparator);
+    if (cps != null) {
+      for (String cp : cps) {
+        paths.add(new File(cp));
+      }
+    }
+    return paths;
+  }
+
+  private List<File> classPath(ClassLoader cl) {
+    List<File> paths = new LinkedList<File>();
+    if (cl == null) {
+      return paths;
+    }
+
+    if (cl instanceof URLClassLoader) {
+      URLClassLoader ucl = (URLClassLoader) cl;
+      URL[] urls = ucl.getURLs();
+      if (urls != null) {
+        for (URL url : urls) {
+          paths.add(new File(url.getFile()));
+        }
+      }
+    }
+    return paths;
+  }
+
+  private SparkInterpreter getSparkInterpreter() {
+    InterpreterGroup intpGroup = getInterpreterGroup();
+    if (intpGroup == null) {
+      return null;
+    }
+    synchronized (intpGroup) {
+      for (Interpreter intp : intpGroup){
+        if (intp.getClassName().equals(SparkInterpreter.class.getName())) {
+          Interpreter p = intp;
+          while (p instanceof WrappedInterpreter) {
+            p = ((WrappedInterpreter) p).getInnerInterpreter();
+          }
+          return (SparkInterpreter) p;
+        }
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    return getSparkInterpreter().getScheduler();
+  }
+
+}
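
In a notebook, DepInterpreter is normally driven through a %dep paragraph that calls z.load(...) on the bound DependencyContext before the %spark interpreter starts; interpret() above rejects the paragraph once a SparkContext exists. A minimal host-side sketch, assuming the Spark REPL jars are on the classpath and that DependencyContext exposes a load(String) method comparable to the resolver removed earlier in this commit (not shown in this hunk):

    // Hypothetical driver code; the property name matches the registration block above.
    Properties p = new Properties();
    p.put("zeppelin.dep.localrepo", "local-repo");
    DepInterpreter dep = new DepInterpreter(p);
    dep.open();
    // Must run before SparkInterpreter initializes its SparkContext,
    // otherwise interpret() returns Code.ERROR (see above).
    InterpreterResult result =
        dep.interpret("z.load(\"org.apache.commons:commons-lang3:3.3.2\")", null);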


[02/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/util/Util.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/util/Util.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/util/Util.java
deleted file mode 100644
index 5d45b06..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/util/Util.java
+++ /dev/null
@@ -1,170 +0,0 @@
-package com.nflabs.zeppelin.util;
-
-import java.util.ArrayList;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * String split utility that honors escape sequences, quoted strings and nested blocks.
- * 
- * @author Leemoonsoo
- *
- */
-public class Util {
-
-  public static String[] split(String str, char split) {
-    return split(str, new String[] {String.valueOf(split)}, false);
-  }
-
-  public static String[] split(String str, String[] splitters, boolean includeSplitter) {
-    String escapeSeq = "\"',;<%>";
-    char escapeChar = '\\';
-    String[] blockStart = new String[] {"\"", "'", "<%", "N_<"};
-    String[] blockEnd = new String[] {"\"", "'", "%>", "N_>"};
-
-    return split(str, escapeSeq, escapeChar, blockStart, blockEnd, splitters, includeSplitter);
-
-  }
-
-  public static String[] split(String str, String escapeSeq, char escapeChar, String[] blockStart,
-      String[] blockEnd, String[] splitters, boolean includeSplitter) {
-
-    List<String> splits = new ArrayList<String>();
-
-    String curString = "";
-
-    boolean escape = false; // true when escape char is found
-    int lastEscapeOffset = -1;
-    int blockStartPos = -1;
-    List<Integer> blockStack = new LinkedList<Integer>();
-
-    for (int i = 0; i < str.length(); i++) {
-      char c = str.charAt(i);
-
-      // escape char detected
-      if (c == escapeChar && escape == false) {
-        escape = true;
-        continue;
-      }
-
-      // escaped char comes
-      if (escape == true) {
-        if (escapeSeq.indexOf(c) < 0) {
-          curString += escapeChar;
-        }
-        curString += c;
-        escape = false;
-        lastEscapeOffset = curString.length();
-        continue;
-      }
-
-      if (blockStack.size() > 0) { // inside of block
-        curString += c;
-        // check multichar block
-        boolean multicharBlockDetected = false;
-        for (int b = 0; b < blockStart.length; b++) {
-          if (blockStartPos >= 0
-              && getBlockStr(blockStart[b]).compareTo(str.substring(blockStartPos, i)) == 0) {
-            blockStack.remove(0);
-            blockStack.add(0, b);
-            multicharBlockDetected = true;
-            break;
-          }
-        }
-        if (multicharBlockDetected == true) {
-          continue;
-        }
-
-        // check if current block is nestable
-        if (isNestedBlock(blockStart[blockStack.get(0)]) == true) {
-          // try to find nested block start
-
-          if (curString.substring(lastEscapeOffset + 1).endsWith(
-              getBlockStr(blockStart[blockStack.get(0)])) == true) {
-            blockStack.add(0, blockStack.get(0)); // block is started
-            blockStartPos = i;
-            continue;
-          }
-        }
-
-        // check if block is finishing
-        if (curString.substring(lastEscapeOffset + 1).endsWith(
-            getBlockStr(blockEnd[blockStack.get(0)]))) {
-          // the block closer is one of the splitters (and not nested block)
-          if (isNestedBlock(blockEnd[blockStack.get(0)]) == false) {
-            for (String splitter : splitters) {
-              if (splitter.compareTo(getBlockStr(blockEnd[blockStack.get(0)])) == 0) {
-                splits.add(curString);
-                if (includeSplitter == true) {
-                  splits.add(splitter);
-                }
-                curString = "";
-                lastEscapeOffset = -1;
-
-                break;
-              }
-            }
-          }
-          blockStartPos = -1;
-          blockStack.remove(0);
-          continue;
-        }
-
-      } else { // not in the block
-        boolean splitted = false;
-        for (String splitter : splitters) {
-          // forward check for splitter
-          if (splitter.compareTo(
-              str.substring(i, Math.min(i + splitter.length(), str.length()))) == 0) {
-            splits.add(curString);
-            if (includeSplitter == true) {
-              splits.add(splitter);
-            }
-            curString = "";
-            lastEscapeOffset = -1;
-            i += splitter.length() - 1;
-            splitted = true;
-            break;
-          }
-        }
-        if (splitted == true) {
-          continue;
-        }
-
-        // add char to current string
-        curString += c;
-
-        // check if block is started
-        for (int b = 0; b < blockStart.length; b++) {
-          if (curString.substring(lastEscapeOffset + 1)
-                       .endsWith(getBlockStr(blockStart[b])) == true) {
-            blockStack.add(0, b); // block is started
-            blockStartPos = i;
-            break;
-          }
-        }
-      }
-    }
-    if (curString.length() > 0) {
-      splits.add(curString.trim());
-    }
-    return splits.toArray(new String[] {});
-
-  }
-
-  private static String getBlockStr(String blockDef) {
-    if (blockDef.startsWith("N_")) {
-      return blockDef.substring("N_".length());
-    } else {
-      return blockDef;
-    }
-  }
-
-  private static boolean isNestedBlock(String blockDef) {
-    if (blockDef.startsWith("N_")) {
-      return true;
-    } else {
-      return false;
-    }
-  }
-}
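
The split() routine above treats quoted and block-delimited regions as opaque, so splitter characters inside them do not break the string; only the final segment is trimmed. A small worked example:

    // Splits on ';' only outside quoted blocks (expected result per the logic above).
    String[] parts = Util.split("a;'b;c';d", ';');
    // parts == { "a", "'b;c'", "d" }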

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java
new file mode 100644
index 0000000..8495bb5
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java
@@ -0,0 +1,531 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.conf;
+
+import java.net.URL;
+import java.util.List;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.XMLConfiguration;
+import org.apache.commons.configuration.tree.ConfigurationNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * Zeppelin configuration.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class ZeppelinConfiguration extends XMLConfiguration {
+  private static final String ZEPPELIN_SITE_XML = "zeppelin-site.xml";
+  private static final long serialVersionUID = 4749305895693848035L;
+  private static final Logger LOG = LoggerFactory.getLogger(ZeppelinConfiguration.class);
+  private static ZeppelinConfiguration conf;
+
+  public ZeppelinConfiguration(URL url) throws ConfigurationException {
+    setDelimiterParsingDisabled(true);
+    load(url);
+  }
+
+  public ZeppelinConfiguration() {
+    ConfVars[] vars = ConfVars.values();
+    for (ConfVars v : vars) {
+      if (v.getType() == ConfVars.VarType.BOOLEAN) {
+        this.setProperty(v.getVarName(), v.getBooleanValue());
+      } else if (v.getType() == ConfVars.VarType.LONG) {
+        this.setProperty(v.getVarName(), v.getLongValue());
+      } else if (v.getType() == ConfVars.VarType.INT) {
+        this.setProperty(v.getVarName(), v.getIntValue());
+      } else if (v.getType() == ConfVars.VarType.FLOAT) {
+        this.setProperty(v.getVarName(), v.getFloatValue());
+      } else if (v.getType() == ConfVars.VarType.STRING) {
+        this.setProperty(v.getVarName(), v.getStringValue());
+      } else {
+        throw new RuntimeException("Unsupported VarType");
+      }
+    }
+
+  }
+
+
+  /**
+   * Load from resource.
+   *
+   * @throws ConfigurationException
+   */
+  public static ZeppelinConfiguration create() {
+    if (conf != null) {
+      return conf;
+    }
+
+    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+    URL url;
+
+    url = ZeppelinConfiguration.class.getResource(ZEPPELIN_SITE_XML);
+    if (url == null) {
+      ClassLoader cl = ZeppelinConfiguration.class.getClassLoader();
+      if (cl != null) {
+        url = cl.getResource(ZEPPELIN_SITE_XML);
+      }
+    }
+    if (url == null) {
+      url = classLoader.getResource(ZEPPELIN_SITE_XML);
+    }
+
+    if (url == null) {
+      LOG.warn("Failed to load configuration, proceeding with a default");
+      conf = new ZeppelinConfiguration();
+    } else {
+      try {
+        LOG.info("Load configuration from " + url);
+        conf = new ZeppelinConfiguration(url);
+      } catch (ConfigurationException e) {
+        LOG.warn("Failed to load configuration from " + url + " proceeding with a default", e);
+        conf = new ZeppelinConfiguration();
+      }
+    }
+
+    return conf;
+  }
+
+
+  private String getStringValue(String name, String d) {
+    List<ConfigurationNode> properties = getRootNode().getChildren();
+    if (properties == null || properties.size() == 0) {
+      return d;
+    }
+    for (ConfigurationNode p : properties) {
+      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
+          && name.equals(p.getChildren("name").get(0).getValue())) {
+        return (String) p.getChildren("value").get(0).getValue();
+      }
+    }
+    return d;
+  }
+
+  private int getIntValue(String name, int d) {
+    List<ConfigurationNode> properties = getRootNode().getChildren();
+    if (properties == null || properties.size() == 0) {
+      return d;
+    }
+    for (ConfigurationNode p : properties) {
+      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
+          && name.equals(p.getChildren("name").get(0).getValue())) {
+        return Integer.parseInt((String) p.getChildren("value").get(0).getValue());
+      }
+    }
+    return d;
+  }
+
+  private long getLongValue(String name, long d) {
+    List<ConfigurationNode> properties = getRootNode().getChildren();
+    if (properties == null || properties.size() == 0) {
+      return d;
+    }
+    for (ConfigurationNode p : properties) {
+      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
+          && name.equals(p.getChildren("name").get(0).getValue())) {
+        return Long.parseLong((String) p.getChildren("value").get(0).getValue());
+      }
+    }
+    return d;
+  }
+
+  private float getFloatValue(String name, float d) {
+    List<ConfigurationNode> properties = getRootNode().getChildren();
+    if (properties == null || properties.size() == 0) {
+      return d;
+    }
+    for (ConfigurationNode p : properties) {
+      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
+          && name.equals(p.getChildren("name").get(0).getValue())) {
+        return Float.parseFloat((String) p.getChildren("value").get(0).getValue());
+      }
+    }
+    return d;
+  }
+
+  private boolean getBooleanValue(String name, boolean d) {
+    List<ConfigurationNode> properties = getRootNode().getChildren();
+    if (properties == null || properties.size() == 0) {
+      return d;
+    }
+    for (ConfigurationNode p : properties) {
+      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
+          && name.equals(p.getChildren("name").get(0).getValue())) {
+        return Boolean.parseBoolean((String) p.getChildren("value").get(0).getValue());
+      }
+    }
+    return d;
+  }
+
+  public String getString(ConfVars c) {
+    return getString(c.name(), c.getVarName(), c.getStringValue());
+  }
+
+  public String getString(String envName, String propertyName, String defaultValue) {
+    if (System.getenv(envName) != null) {
+      return System.getenv(envName);
+    }
+    if (System.getProperty(propertyName) != null) {
+      return System.getProperty(propertyName);
+    }
+
+    return getStringValue(propertyName, defaultValue);
+  }
+
+  public int getInt(ConfVars c) {
+    return getInt(c.name(), c.getVarName(), c.getIntValue());
+  }
+
+  public int getInt(String envName, String propertyName, int defaultValue) {
+    if (System.getenv(envName) != null) {
+      return Integer.parseInt(System.getenv(envName));
+    }
+
+    if (System.getProperty(propertyName) != null) {
+      return Integer.parseInt(System.getProperty(propertyName));
+    }
+    return getIntValue(propertyName, defaultValue);
+  }
+
+  public long getLong(ConfVars c) {
+    return getLong(c.name(), c.getVarName(), c.getLongValue());
+  }
+
+  public long getLong(String envName, String propertyName, long defaultValue) {
+    if (System.getenv(envName) != null) {
+      return Long.parseLong(System.getenv(envName));
+    }
+
+    if (System.getProperty(propertyName) != null) {
+      return Long.parseLong(System.getProperty(propertyName));
+    }
+    return getLongValue(propertyName, defaultValue);
+  }
+
+  public float getFloat(ConfVars c) {
+    return getFloat(c.name(), c.getVarName(), c.getFloatValue());
+  }
+
+  public float getFloat(String envName, String propertyName, float defaultValue) {
+    if (System.getenv(envName) != null) {
+      return Float.parseFloat(System.getenv(envName));
+    }
+    if (System.getProperty(propertyName) != null) {
+      return Float.parseFloat(System.getProperty(propertyName));
+    }
+    return getFloatValue(propertyName, defaultValue);
+  }
+
+  public boolean getBoolean(ConfVars c) {
+    return getBoolean(c.name(), c.getVarName(), c.getBooleanValue());
+  }
+
+  public boolean getBoolean(String envName, String propertyName, boolean defaultValue) {
+    if (System.getenv(envName) != null) {
+      return Boolean.parseBoolean(System.getenv(envName));
+    }
+
+    if (System.getProperty(propertyName) != null) {
+      return Boolean.parseBoolean(System.getProperty(propertyName));
+    }
+    return getBooleanValue(propertyName, defaultValue);
+  }
+
+  public boolean useSsl() {
+    return getBoolean(ConfVars.ZEPPELIN_SSL);
+  }
+
+  public boolean useClientAuth() {
+    return getBoolean(ConfVars.ZEPPELIN_SSL_CLIENT_AUTH);
+  }
+
+  public int getServerPort() {
+    return getInt(ConfVars.ZEPPELIN_PORT);
+  }
+
+  public int getWebSocketPort() {
+    int port = getInt(ConfVars.ZEPPELIN_WEBSOCKET_PORT);
+    if (port < 0) {
+      return getServerPort() + 1;
+    } else {
+      return port;
+    }
+  }
+
+  public String getKeyStorePath() {
+    return getRelativeDir(ConfVars.ZEPPELIN_SSL_KEYSTORE_PATH);
+  }
+
+  public String getKeyStoreType() {
+    return getString(ConfVars.ZEPPELIN_SSL_KEYSTORE_TYPE);
+  }
+
+  public String getKeyStorePassword() {
+    return getString(ConfVars.ZEPPELIN_SSL_KEYSTORE_PASSWORD);
+  }
+
+  public String getKeyManagerPassword() {
+    String password = getString(ConfVars.ZEPPELIN_SSL_KEY_MANAGER_PASSWORD);
+    if (password == null) {
+      return getKeyStorePassword();
+    } else {
+      return password;
+    }
+  }
+
+  public String getTrustStorePath() {
+    String path = getString(ConfVars.ZEPPELIN_SSL_TRUSTSTORE_PATH);
+    if (path == null) {
+      return getKeyStorePath();
+    } else {
+      return getRelativeDir(path);
+    }
+  }
+
+  public String getTrustStoreType() {
+    String type = getString(ConfVars.ZEPPELIN_SSL_TRUSTSTORE_TYPE);
+    if (type == null) {
+      return getKeyStoreType();
+    } else {
+      return type;
+    }
+  }
+
+  public String getTrustStorePassword() {
+    String password = getString(ConfVars.ZEPPELIN_SSL_TRUSTSTORE_PASSWORD);
+    if (password == null) {
+      return getKeyStorePassword();
+    } else {
+      return password;
+    }
+  }
+
+  public String getNotebookDir() {
+    return getRelativeDir(ConfVars.ZEPPELIN_NOTEBOOK_DIR);
+  }
+
+  public String getInterpreterDir() {
+    return getRelativeDir(ConfVars.ZEPPELIN_INTERPRETER_DIR);
+  }
+
+  public String getInterpreterSettingPath() {
+    return getRelativeDir("conf/interpreter.json");
+  }
+
+  public String getInterpreterRemoteRunnerPath() {
+    return getRelativeDir(ConfVars.ZEPPELIN_INTERPRETER_REMOTE_RUNNER);
+  }
+
+  public String getRelativeDir(ConfVars c) {
+    return getRelativeDir(getString(c));
+  }
+
+  public String getRelativeDir(String path) {
+    if (path != null && path.startsWith("/")) {
+      return path;
+    } else {
+      return getString(ConfVars.ZEPPELIN_HOME) + "/" + path;
+    }
+  }
+
+
+  /**
+   * Configuration variables and their default values.
+   *
+   * @author Leemoonsoo
+   *
+   */
+  public static enum ConfVars {
+    ZEPPELIN_HOME("zeppelin.home", "../"),
+    ZEPPELIN_PORT("zeppelin.server.port", 8080),
+    // negative websocket port denotes that server port + 1 should be used
+    ZEPPELIN_WEBSOCKET_PORT("zeppelin.websocket.port", -1),
+    ZEPPELIN_SSL("zeppelin.ssl", false),
+    ZEPPELIN_SSL_CLIENT_AUTH("zeppelin.ssl.client.auth", false),
+    ZEPPELIN_SSL_KEYSTORE_PATH("zeppelin.ssl.keystore.path", "conf/keystore"),
+    ZEPPELIN_SSL_KEYSTORE_TYPE("zeppelin.ssl.keystore.type", "JKS"),
+    ZEPPELIN_SSL_KEYSTORE_PASSWORD("zeppelin.ssl.keystore.password", ""),
+    ZEPPELIN_SSL_KEY_MANAGER_PASSWORD("zeppelin.ssl.key.manager.password", null),
+    ZEPPELIN_SSL_TRUSTSTORE_PATH("zeppelin.ssl.truststore.path", null),
+    ZEPPELIN_SSL_TRUSTSTORE_TYPE("zeppelin.ssl.truststore.type", null),
+    ZEPPELIN_SSL_TRUSTSTORE_PASSWORD("zeppelin.ssl.truststore.password", null),
+    ZEPPELIN_WAR("zeppelin.war", "../zeppelin-web/src/main/webapp"),
+    ZEPPELIN_API_WAR("zeppelin.api.war", "../zeppelin-docs/src/main/swagger"),
+    ZEPPELIN_INTERPRETERS("zeppelin.interpreters", "org.apache.zeppelin.spark.SparkInterpreter,"
+        + "org.apache.zeppelin.spark.PySparkInterpreter,"
+        + "org.apache.zeppelin.spark.SparkSqlInterpreter,"
+        + "org.apache.zeppelin.spark.DepInterpreter,"
+        + "org.apache.zeppelin.markdown.Markdown,"
+        + "org.apache.zeppelin.shell.ShellInterpreter"),
+        ZEPPELIN_INTERPRETER_DIR("zeppelin.interpreter.dir", "interpreter"),
+        ZEPPELIN_ENCODING("zeppelin.encoding", "UTF-8"),
+        ZEPPELIN_NOTEBOOK_DIR("zeppelin.notebook.dir", "notebook"),
+    ZEPPELIN_INTERPRETER_REMOTE_RUNNER("zeppelin.interpreter.remoterunner", "bin/interpreter.sh"),
+    // Whether interpreter settings are bound automatically when a new note is created.
+    ZEPPELIN_NOTEBOOK_AUTO_INTERPRETER_BINDING("zeppelin.notebook.autoInterpreterBinding", true);
+
+    private String varName;
+    @SuppressWarnings("rawtypes")
+    private Class varClass;
+    private String stringValue;
+    private VarType type;
+    private int intValue;
+    private float floatValue;
+    private boolean booleanValue;
+    private long longValue;
+
+
+    ConfVars(String varName, String varValue) {
+      this.varName = varName;
+      this.varClass = String.class;
+      this.stringValue = varValue;
+      this.intValue = -1;
+      this.floatValue = -1;
+      this.longValue = -1;
+      this.booleanValue = false;
+      this.type = VarType.STRING;
+    }
+
+    ConfVars(String varName, int intValue) {
+      this.varName = varName;
+      this.varClass = Integer.class;
+      this.stringValue = null;
+      this.intValue = intValue;
+      this.floatValue = -1;
+      this.longValue = -1;
+      this.booleanValue = false;
+      this.type = VarType.INT;
+    }
+
+    ConfVars(String varName, long longValue) {
+      this.varName = varName;
+      this.varClass = Long.class;
+      this.stringValue = null;
+      this.intValue = -1;
+      this.floatValue = -1;
+      this.longValue = longValue;
+      this.booleanValue = false;
+      this.type = VarType.LONG;
+    }
+
+    ConfVars(String varName, float floatValue) {
+      this.varName = varName;
+      this.varClass = Float.class;
+      this.stringValue = null;
+      this.intValue = -1;
+      this.longValue = -1;
+      this.floatValue = floatValue;
+      this.booleanValue = false;
+      this.type = VarType.FLOAT;
+    }
+
+    ConfVars(String varName, boolean booleanValue) {
+      this.varName = varName;
+      this.varClass = Boolean.class;
+      this.stringValue = null;
+      this.intValue = -1;
+      this.longValue = -1;
+      this.floatValue = -1;
+      this.booleanValue = booleanValue;
+      this.type = VarType.BOOLEAN;
+    }
+
+    public String getVarName() {
+      return varName;
+    }
+
+    @SuppressWarnings("rawtypes")
+    public Class getVarClass() {
+      return varClass;
+    }
+
+    public int getIntValue() {
+      return intValue;
+    }
+
+    public long getLongValue() {
+      return longValue;
+    }
+
+    public float getFloatValue() {
+      return floatValue;
+    }
+
+    public String getStringValue() {
+      return stringValue;
+    }
+
+    public boolean getBooleanValue() {
+      return booleanValue;
+    }
+
+    public VarType getType() {
+      return type;
+    }
+
+    enum VarType {
+      STRING {
+        @Override
+        void checkType(String value) throws Exception {}
+      },
+      INT {
+        @Override
+        void checkType(String value) throws Exception {
+          Integer.valueOf(value);
+        }
+      },
+      LONG {
+        @Override
+        void checkType(String value) throws Exception {
+          Long.valueOf(value);
+        }
+      },
+      FLOAT {
+        @Override
+        void checkType(String value) throws Exception {
+          Float.valueOf(value);
+        }
+      },
+      BOOLEAN {
+        @Override
+        void checkType(String value) throws Exception {
+          Boolean.valueOf(value);
+        }
+      };
+
+      boolean isType(String value) {
+        try {
+          checkType(value);
+        } catch (Exception e) {
+          return false;
+        }
+        return true;
+      }
+
+      String typeString() {
+        return name().toUpperCase();
+      }
+
+      abstract void checkType(String value) throws Exception;
+    }
+  }
+}
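
Each ConfVars entry can be overridden at several levels; the getInt()/getString() accessors above consult, in order, an environment variable named after the enum constant, a Java system property named after the variable, the zeppelin-site.xml entry, and finally the built-in default. For example:

    // Lookup order for ZEPPELIN_PORT ("zeppelin.server.port", default 8080):
    //   1. environment variable ZEPPELIN_PORT
    //   2. system property -Dzeppelin.server.port=...
    //   3. <zeppelin.server.port> in zeppelin-site.xml
    //   4. built-in default (8080)
    ZeppelinConfiguration conf = ZeppelinConfiguration.create();
    int port = conf.getInt(ConfVars.ZEPPELIN_PORT);
    int wsPort = conf.getWebSocketPort();  // server port + 1 when the setting is negative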

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterFactory.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterFactory.java
new file mode 100644
index 0000000..7c81e90
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterFactory.java
@@ -0,0 +1,613 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.zeppelin.conf.ZeppelinConfiguration;
+import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
+import org.apache.zeppelin.interpreter.Interpreter.RegisteredInterpreter;
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+/**
+ * Manage interpreters.
+ *
+ */
+public class InterpreterFactory {
+  Logger logger = LoggerFactory.getLogger(InterpreterFactory.class);
+
+  private Map<String, URLClassLoader> cleanCl = Collections
+      .synchronizedMap(new HashMap<String, URLClassLoader>());
+
+  private ZeppelinConfiguration conf;
+  String[] interpreterClassList;
+
+  private Map<String, InterpreterSetting> interpreterSettings =
+      new HashMap<String, InterpreterSetting>();
+
+  private Map<String, List<String>> interpreterBindings = new HashMap<String, List<String>>();
+
+  private Gson gson;
+
+  private InterpreterOption defaultOption;
+
+  public InterpreterFactory(ZeppelinConfiguration conf) throws InterpreterException, IOException {
+    this(conf, new InterpreterOption(true));
+  }
+
+
+  public InterpreterFactory(ZeppelinConfiguration conf, InterpreterOption defaultOption)
+      throws InterpreterException, IOException {
+    this.conf = conf;
+    this.defaultOption = defaultOption;
+    String replsConf = conf.getString(ConfVars.ZEPPELIN_INTERPRETERS);
+    interpreterClassList = replsConf.split(",");
+
+    GsonBuilder builder = new GsonBuilder();
+    builder.setPrettyPrinting();
+    builder.registerTypeAdapter(Interpreter.class, new InterpreterSerializer());
+    gson = builder.create();
+
+    init();
+  }
+
+  private void init() throws InterpreterException, IOException {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+
+    // Load classes
+    File[] interpreterDirs = new File(conf.getInterpreterDir()).listFiles();
+    if (interpreterDirs != null) {
+      for (File path : interpreterDirs) {
+        logger.info("Reading " + path.getAbsolutePath());
+        URL[] urls = null;
+        try {
+          urls = recursiveBuildLibList(path);
+        } catch (MalformedURLException e1) {
+          logger.error("Can't load jars ", e1);
+        }
+        URLClassLoader ccl = new URLClassLoader(urls, oldcl);
+
+        for (String className : interpreterClassList) {
+          try {
+            Class.forName(className, true, ccl);
+            Set<String> keys = Interpreter.registeredInterpreters.keySet();
+            for (String intName : keys) {
+              if (className.equals(
+                  Interpreter.registeredInterpreters.get(intName).getClassName())) {
+                Interpreter.registeredInterpreters.get(intName).setPath(path.getAbsolutePath());
+                logger.info("Interpreter " + intName + " found. class=" + className);
+                cleanCl.put(path.getAbsolutePath(), ccl);
+              }
+            }
+          } catch (ClassNotFoundException e) {
+            // nothing to do
+          }
+        }
+      }
+    }
+
+    loadFromFile();
+
+    // if no interpreter settings are loaded, create default set
+    synchronized (interpreterSettings) {
+      if (interpreterSettings.size() == 0) {
+        HashMap<String, List<RegisteredInterpreter>> groupClassNameMap =
+            new HashMap<String, List<RegisteredInterpreter>>();
+
+        for (String k : Interpreter.registeredInterpreters.keySet()) {
+          RegisteredInterpreter info = Interpreter.registeredInterpreters.get(k);
+
+          if (!groupClassNameMap.containsKey(info.getGroup())) {
+            groupClassNameMap.put(info.getGroup(), new LinkedList<RegisteredInterpreter>());
+          }
+
+          groupClassNameMap.get(info.getGroup()).add(info);
+        }
+
+        for (String className : interpreterClassList) {
+          for (String groupName : groupClassNameMap.keySet()) {
+            List<RegisteredInterpreter> infos = groupClassNameMap.get(groupName);
+
+            boolean found = false;
+            Properties p = new Properties();
+            for (RegisteredInterpreter info : infos) {
+              if (found == false && info.getClassName().equals(className)) {
+                found = true;
+              }
+
+              for (String k : info.getProperties().keySet()) {
+                p.put(k, info.getProperties().get(k).getDefaultValue());
+              }
+            }
+
+            if (found) {
+              // add all interpreters in group
+              add(groupName, groupName, defaultOption, p);
+              groupClassNameMap.remove(groupName);
+              break;
+            }
+          }
+        }
+      }
+    }
+
+    for (String settingId : interpreterSettings.keySet()) {
+      InterpreterSetting setting = interpreterSettings.get(settingId);
+      logger.info("Interpreter setting group {} : id={}, name={}",
+          setting.getGroup(), settingId, setting.getName());
+      for (Interpreter interpreter : setting.getInterpreterGroup()) {
+        logger.info("  className = {}", interpreter.getClassName());
+      }
+    }
+  }
+
+  private void loadFromFile() throws IOException {
+    GsonBuilder builder = new GsonBuilder();
+    builder.setPrettyPrinting();
+    builder.registerTypeAdapter(Interpreter.class, new InterpreterSerializer());
+    Gson gson = builder.create();
+
+    File settingFile = new File(conf.getInterpreterSettingPath());
+    if (!settingFile.exists()) {
+      // nothing to read
+      return;
+    }
+    FileInputStream fis = new FileInputStream(settingFile);
+    InputStreamReader isr = new InputStreamReader(fis);
+    BufferedReader bufferedReader = new BufferedReader(isr);
+    StringBuilder sb = new StringBuilder();
+    String line;
+    while ((line = bufferedReader.readLine()) != null) {
+      sb.append(line);
+    }
+    isr.close();
+    fis.close();
+
+    String json = sb.toString();
+    InterpreterInfoSaving info = gson.fromJson(json, InterpreterInfoSaving.class);
+
+    for (String k : info.interpreterSettings.keySet()) {
+      InterpreterSetting setting = info.interpreterSettings.get(k);
+
+      // Always use separate interpreter process
+      // While we decided to turn this feature on always (without providing
+      // enable/disable option on GUI).
+      // previously created setting should turn this feature on here.
+      setting.getOption().setRemote(true);
+
+      InterpreterGroup interpreterGroup = createInterpreterGroup(
+          setting.getGroup(),
+          setting.getOption(),
+          setting.getProperties());
+
+      InterpreterSetting intpSetting = new InterpreterSetting(
+          setting.id(),
+          setting.getName(),
+          setting.getGroup(),
+          setting.getOption(),
+          interpreterGroup);
+
+      interpreterSettings.put(k, intpSetting);
+    }
+
+    this.interpreterBindings = info.interpreterBindings;
+  }
+
+
+  private void saveToFile() throws IOException {
+    String jsonString;
+
+    synchronized (interpreterSettings) {
+      InterpreterInfoSaving info = new InterpreterInfoSaving();
+      info.interpreterBindings = interpreterBindings;
+      info.interpreterSettings = interpreterSettings;
+
+      jsonString = gson.toJson(info);
+    }
+
+    File settingFile = new File(conf.getInterpreterSettingPath());
+    if (!settingFile.exists()) {
+      settingFile.createNewFile();
+    }
+
+    FileOutputStream fos = new FileOutputStream(settingFile, false);
+    OutputStreamWriter out = new OutputStreamWriter(fos);
+    out.append(jsonString);
+    out.close();
+    fos.close();
+  }
+
+  private RegisteredInterpreter getRegisteredReplInfoFromClassName(String clsName) {
+    Set<String> keys = Interpreter.registeredInterpreters.keySet();
+    for (String intName : keys) {
+      RegisteredInterpreter info = Interpreter.registeredInterpreters.get(intName);
+      if (clsName.equals(info.getClassName())) {
+        return info;
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Return an ordered list of default interpreter setting ids.
+   * The list contains at most one setting per interpreter group,
+   * ordered by interpreter class (the order defined by ZEPPELIN_INTERPRETERS),
+   * then by interpreter setting name.
+   * @return default interpreter setting ids
+   */
+  public List<String> getDefaultInterpreterSettingList() {
+    // this list will contain default interpreter setting list
+    List<String> defaultSettings = new LinkedList<String>();
+
+    // to ignore the same interpreter group
+    Map<String, Boolean> interpreterGroupCheck = new HashMap<String, Boolean>();
+
+    List<InterpreterSetting> sortedSettings = get();
+
+    for (InterpreterSetting setting : sortedSettings) {
+      if (defaultSettings.contains(setting.id())) {
+        continue;
+      }
+
+      if (!interpreterGroupCheck.containsKey(setting.getGroup())) {
+        defaultSettings.add(setting.id());
+        interpreterGroupCheck.put(setting.getGroup(), true);
+      }
+    }
+    return defaultSettings;
+  }
+
+  public List<RegisteredInterpreter> getRegisteredInterpreterList() {
+    List<RegisteredInterpreter> registeredInterpreters = new LinkedList<RegisteredInterpreter>();
+
+    for (String className : interpreterClassList) {
+      registeredInterpreters.add(Interpreter.findRegisteredInterpreterByClassName(className));
+    }
+
+    return registeredInterpreters;
+  }
+
+  /**
+   * Add a new interpreter setting and persist it.
+   *
+   * @param name user defined setting name
+   * @param groupName interpreter group name to instantiate
+   * @param option interpreter option (e.g. whether to run remotely)
+   * @param properties interpreter properties
+   * @return the newly created interpreter group
+   * @throws InterpreterException
+   * @throws IOException
+   */
+  public InterpreterGroup add(String name, String groupName,
+      InterpreterOption option, Properties properties)
+      throws InterpreterException, IOException {
+    synchronized (interpreterSettings) {
+      InterpreterGroup interpreterGroup = createInterpreterGroup(groupName, option, properties);
+
+      InterpreterSetting intpSetting = new InterpreterSetting(
+          name,
+          groupName,
+          option,
+          interpreterGroup);
+      interpreterSettings.put(intpSetting.id(), intpSetting);
+
+      saveToFile();
+      return interpreterGroup;
+    }
+  }
+
+  private InterpreterGroup createInterpreterGroup(String groupName,
+      InterpreterOption option,
+      Properties properties)
+      throws InterpreterException {
+    InterpreterGroup interpreterGroup = new InterpreterGroup();
+
+    for (String className : interpreterClassList) {
+      Set<String> keys = Interpreter.registeredInterpreters.keySet();
+      for (String intName : keys) {
+        RegisteredInterpreter info = Interpreter.registeredInterpreters
+            .get(intName);
+        if (info.getClassName().equals(className)
+            && info.getGroup().equals(groupName)) {
+          Interpreter intp;
+
+          if (option.isRemote()) {
+            intp = createRemoteRepl(info.getPath(),
+                info.getClassName(),
+                properties);
+          } else {
+            intp = createRepl(info.getPath(),
+                info.getClassName(),
+                properties);
+          }
+          interpreterGroup.add(intp);
+          intp.setInterpreterGroup(interpreterGroup);
+          break;
+        }
+      }
+    }
+    return interpreterGroup;
+  }
+
+  public void remove(String id) throws IOException {
+    synchronized (interpreterSettings) {
+      if (interpreterSettings.containsKey(id)) {
+        InterpreterSetting intp = interpreterSettings.get(id);
+        intp.getInterpreterGroup().close();
+        intp.getInterpreterGroup().destroy();
+
+        interpreterSettings.remove(id);
+        for (List<String> settings : interpreterBindings.values()) {
+          Iterator<String> it = settings.iterator();
+          while (it.hasNext()) {
+            String settingId = it.next();
+            if (settingId.equals(id)) {
+              it.remove();
+            }
+          }
+        }
+        saveToFile();
+      }
+    }
+  }
+
+  /**
+   * Get all interpreter settings, ordered by interpreter class
+   * (as defined by ZEPPELIN_INTERPRETERS) and then by setting name.
+   *
+   * @return ordered list of interpreter settings
+   */
+  public List<InterpreterSetting> get() {
+    synchronized (interpreterSettings) {
+      List<InterpreterSetting> orderedSettings = new LinkedList<InterpreterSetting>();
+      List<InterpreterSetting> settings = new LinkedList<InterpreterSetting>(
+          interpreterSettings.values());
+      Collections.sort(settings, new Comparator<InterpreterSetting>(){
+        @Override
+        public int compare(InterpreterSetting o1, InterpreterSetting o2) {
+          return o1.getName().compareTo(o2.getName());
+        }
+      });
+
+      for (String className : interpreterClassList) {
+        for (InterpreterSetting setting : settings) {
+          for (Interpreter intp : setting.getInterpreterGroup()) {
+            if (className.equals(intp.getClassName())) {
+              boolean alreadyAdded = false;
+              for (InterpreterSetting st : orderedSettings) {
+                if (setting.id().equals(st.id())) {
+                  alreadyAdded = true;
+                  break;
+                }
+              }
+              if (!alreadyAdded) {
+                orderedSettings.add(setting);
+              }
+              break;
+            }
+          }
+        }
+      }
+      return orderedSettings;
+    }
+  }
+
+  public InterpreterSetting get(String id) {
+    synchronized (interpreterSettings) {
+      return interpreterSettings.get(id);
+    }
+  }
+
+  public void putNoteInterpreterSettingBinding(String noteId,
+      List<String> settingList) throws IOException {
+    synchronized (interpreterSettings) {
+      interpreterBindings.put(noteId, settingList);
+      saveToFile();
+    }
+  }
+
+  public void removeNoteInterpreterSettingBinding(String noteId) {
+    synchronized (interpreterSettings) {
+      interpreterBindings.remove(noteId);
+    }
+  }
+
+  public List<String> getNoteInterpreterSettingBinding(String noteId) {
+    LinkedList<String> bindings = new LinkedList<String>();
+    synchronized (interpreterSettings) {
+      List<String> settingIds = interpreterBindings.get(noteId);
+      if (settingIds != null) {
+        bindings.addAll(settingIds);
+      }
+    }
+    return bindings;
+  }
+
+  /**
+   * Change interpreter properties and restart the interpreter group.
+   *
+   * @param id interpreter setting id
+   * @param option interpreter option
+   * @param properties new interpreter properties
+   * @throws IOException
+   */
+  public void setPropertyAndRestart(String id, InterpreterOption option,
+      Properties properties) throws IOException {
+    synchronized (interpreterSettings) {
+      InterpreterSetting intpsetting = interpreterSettings.get(id);
+      if (intpsetting != null) {
+        intpsetting.getInterpreterGroup().close();
+        intpsetting.getInterpreterGroup().destroy();
+
+        intpsetting.setOption(option);
+
+        InterpreterGroup interpreterGroup = createInterpreterGroup(
+            intpsetting.getGroup(), option, properties);
+        intpsetting.setInterpreterGroup(interpreterGroup);
+        saveToFile();
+      } else {
+        throw new InterpreterException("Interpreter setting id " + id
+            + " not found");
+      }
+    }
+  }
+
+  public void restart(String id) {
+    synchronized (interpreterSettings) {
+      InterpreterSetting intpsetting = interpreterSettings.get(id);
+      if (intpsetting != null) {
+        intpsetting.getInterpreterGroup().close();
+        intpsetting.getInterpreterGroup().destroy();
+
+        InterpreterGroup interpreterGroup = createInterpreterGroup(
+            intpsetting.getGroup(), intpsetting.getOption(), intpsetting.getProperties());
+        intpsetting.setInterpreterGroup(interpreterGroup);
+      } else {
+        throw new InterpreterException("Interpreter setting id " + id
+            + " not found");
+      }
+    }
+  }
+
+
+  public void close() {
+    synchronized (interpreterSettings) {
+      Collection<InterpreterSetting> intpsettings = interpreterSettings.values();
+      for (InterpreterSetting intpsetting : intpsettings) {
+        intpsetting.getInterpreterGroup().close();
+        intpsetting.getInterpreterGroup().destroy();
+      }
+    }
+  }
+
+  private Interpreter createRepl(String dirName, String className,
+      Properties property)
+      throws InterpreterException {
+    logger.info("Create repl {} from {}", className, dirName);
+
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    try {
+
+      URLClassLoader ccl = cleanCl.get(dirName);
+      if (ccl == null) {
+        // classloader fallback
+        ccl = URLClassLoader.newInstance(new URL[] {}, oldcl);
+      }
+
+      boolean separateCL = true;
+      try { // check if the server's classloader already has the class.
+        Class<?> cls = Class.forName(className);
+        if (cls != null) {
+          separateCL = false;
+        }
+      } catch (Exception e) {
+        // class not visible here; keep a separate classloader for the interpreter.
+      }
+
+      URLClassLoader cl;
+
+      if (separateCL) {
+        cl = URLClassLoader.newInstance(new URL[] {}, ccl);
+      } else {
+        cl = ccl;
+      }
+      Thread.currentThread().setContextClassLoader(cl);
+
+      Class<Interpreter> replClass = (Class<Interpreter>) cl.loadClass(className);
+      Constructor<Interpreter> constructor =
+          replClass.getConstructor(new Class[] {Properties.class});
+      Interpreter repl = constructor.newInstance(property);
+      repl.setClassloaderUrls(ccl.getURLs());
+      LazyOpenInterpreter intp = new LazyOpenInterpreter(
+          new ClassloaderInterpreter(repl, cl));
+      return intp;
+    } catch (SecurityException e) {
+      throw new InterpreterException(e);
+    } catch (NoSuchMethodException e) {
+      throw new InterpreterException(e);
+    } catch (IllegalArgumentException e) {
+      throw new InterpreterException(e);
+    } catch (InstantiationException e) {
+      throw new InterpreterException(e);
+    } catch (IllegalAccessException e) {
+      throw new InterpreterException(e);
+    } catch (InvocationTargetException e) {
+      throw new InterpreterException(e);
+    } catch (ClassNotFoundException e) {
+      throw new InterpreterException(e);
+    } finally {
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+
+  private Interpreter createRemoteRepl(String interpreterPath, String className,
+      Properties property) {
+
+    LazyOpenInterpreter intp = new LazyOpenInterpreter(new RemoteInterpreter(
+        property, className, conf.getInterpreterRemoteRunnerPath(), interpreterPath));
+    return intp;
+  }
+
+
+  private URL[] recursiveBuildLibList(File path) throws MalformedURLException {
+    URL[] urls = new URL[0];
+    if (path == null || !path.exists()) {
+      return urls;
+    } else if (path.getName().startsWith(".")) {
+      return urls;
+    } else if (path.isDirectory()) {
+      File[] files = path.listFiles();
+      if (files != null) {
+        for (File f : files) {
+          urls = (URL[]) ArrayUtils.addAll(urls, recursiveBuildLibList(f));
+        }
+      }
+      return urls;
+    } else {
+      return new URL[] {path.toURI().toURL()};
+    }
+  }
+}
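
A minimal usage sketch of the factory API above, assuming `factory` is an already-constructed InterpreterFactory, "spark" and "spark-prod" are illustrative names for a registered interpreter group and a new setting, and `noteId` refers to an existing note; exception handling is omitted:

    // Create a new setting whose interpreters run in a remote process.
    InterpreterOption option = new InterpreterOption(true);
    Properties props = new Properties();
    factory.add("spark-prod", "spark", option, props);   // persisted by saveToFile()

    // Bind one setting per interpreter group to a note (also persisted).
    List<String> defaults = factory.getDefaultInterpreterSettingList();
    factory.putNoteInterpreterSettingBinding(noteId, defaults);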

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterInfoSaving.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterInfoSaving.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterInfoSaving.java
new file mode 100644
index 0000000..ae507d4
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterInfoSaving.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Serializable snapshot of interpreter settings and note/interpreter bindings,
+ * persisted as JSON by InterpreterFactory.
+ */
+public class InterpreterInfoSaving {
+  public Map<String, InterpreterSetting> interpreterSettings;
+  public Map<String, List<String>> interpreterBindings;
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java
new file mode 100644
index 0000000..e2adecd
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+/**
+ * Per-setting interpreter options. Currently only controls whether the
+ * interpreter runs in a separate (remote) process.
+ */
+public class InterpreterOption {
+  boolean remote;
+
+  public InterpreterOption() {
+    remote = false;
+  }
+
+  public InterpreterOption(boolean remote) {
+    this.remote = remote;
+  }
+
+  public boolean isRemote() {
+    return remote;
+  }
+
+  public void setRemote(boolean remote) {
+    this.remote = remote;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSerializer.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSerializer.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSerializer.java
new file mode 100644
index 0000000..a2deb7e
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSerializer.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.lang.reflect.Type;
+
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonSerializationContext;
+import com.google.gson.JsonSerializer;
+
+
+/**
+ * Interpreter class serializer for gson
+ *
+ */
+public class InterpreterSerializer implements JsonSerializer<Interpreter>,
+  JsonDeserializer<Interpreter> {
+
+  @Override
+  public JsonElement serialize(Interpreter interpreter, Type type,
+      JsonSerializationContext context) {
+    JsonObject json = new JsonObject();
+    json.addProperty("class", interpreter.getClassName());
+    json.addProperty(
+        "name",
+        Interpreter.findRegisteredInterpreterByClassName(
+            interpreter.getClassName()).getName());
+    return json;
+  }
+
+  @Override
+  public Interpreter deserialize(JsonElement json, Type typeOfT,
+      JsonDeserializationContext context) throws JsonParseException {
+    // Interpreter instances are not revived from JSON; InterpreterFactory
+    // recreates them from the persisted settings instead.
+    return null;
+  }
+
+}
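
For reference, a sketch of how this adapter is wired into Gson (it mirrors InterpreterFactory.loadFromFile above); the class/name pair in the comment is only illustrative:

    Gson gson = new GsonBuilder()
        .setPrettyPrinting()
        .registerTypeAdapter(Interpreter.class, new InterpreterSerializer())
        .create();
    // serialize(...) emits only the class name and the registered name, e.g.
    //   {"class": "org.apache.zeppelin.spark.SparkInterpreter", "name": "spark"}
    // deserialize(...) deliberately returns null, see the comment in the class.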

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java
new file mode 100644
index 0000000..04785aa
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.util.Properties;
+import java.util.Random;
+
+import org.apache.zeppelin.notebook.utility.IdHashes;
+
+/**
+ * Interpreter settings
+ */
+public class InterpreterSetting {
+  private String id;
+  private String name;
+  private String group;
+  private String description;
+  private Properties properties;
+  private InterpreterGroup interpreterGroup;
+  private InterpreterOption option;
+
+  public InterpreterSetting(String id, String name,
+      String group,
+      InterpreterOption option,
+      InterpreterGroup interpreterGroup) {
+    this.id = id;
+    this.name = name;
+    this.group = group;
+    this.properties = interpreterGroup.getProperty();
+    this.option = option;
+    this.interpreterGroup = interpreterGroup;
+  }
+
+  public InterpreterSetting(String name,
+      String group,
+      InterpreterOption option,
+      InterpreterGroup interpreterGroup) {
+    this(generateId(), name, group, option, interpreterGroup);
+  }
+
+  public String id() {
+    return id;
+  }
+
+  private static String generateId() {
+    return IdHashes.encode(System.currentTimeMillis() + new Random().nextInt());
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getDescription() {
+    return description;
+  }
+
+  public void setDescription(String desc) {
+    this.description = desc;
+  }
+
+  public String getGroup() {
+    return group;
+  }
+
+  public InterpreterGroup getInterpreterGroup() {
+    return interpreterGroup;
+  }
+
+  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
+    this.interpreterGroup = interpreterGroup;
+    this.properties = interpreterGroup.getProperty();
+  }
+
+  public Properties getProperties() {
+    return properties;
+  }
+
+  public InterpreterOption getOption() {
+    if (option == null) {
+      option = new InterpreterOption();
+    }
+
+    return option;
+  }
+
+  public void setOption(InterpreterOption option) {
+    this.option = option;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/JobListenerFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/JobListenerFactory.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/JobListenerFactory.java
new file mode 100644
index 0000000..23cd957
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/JobListenerFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.notebook;
+
+import org.apache.zeppelin.scheduler.JobListener;
+
+/**
+ * TODO(moon): provide description.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public interface JobListenerFactory {
+  public JobListener getParagraphJobListener(Note note);
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Note.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Note.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Note.java
new file mode 100644
index 0000000..9204a07
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Note.java
@@ -0,0 +1,367 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.notebook;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.zeppelin.conf.ZeppelinConfiguration;
+import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.notebook.utility.IdHashes;
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.JobListener;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.Job.Status;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+/**
+ * A note: an ordered list of paragraphs with interpreters bound to it.
+ */
+public class Note implements Serializable, JobListener {
+  transient Logger logger = LoggerFactory.getLogger(Note.class);
+  List<Paragraph> paragraphs = new LinkedList<Paragraph>();
+  private String name;
+  private String id;
+
+  private transient NoteInterpreterLoader replLoader;
+  private transient ZeppelinConfiguration conf;
+  private transient JobListenerFactory jobListenerFactory;
+
+  /**
+   * Note configurations.
+   *
+   * - looknfeel
+   * - cron
+   */
+  private Map<String, Object> config = new HashMap<String, Object>();
+
+  /**
+   * note information.
+   *
+   * - cron : cron expression validity.
+   */
+  private Map<String, Object> info = new HashMap<String, Object>();
+
+  public Note() {}
+
+  public Note(ZeppelinConfiguration conf, NoteInterpreterLoader replLoader,
+      JobListenerFactory jobListenerFactory, org.quartz.Scheduler quartzSched) {
+    this.conf = conf;
+    this.replLoader = replLoader;
+    this.jobListenerFactory = jobListenerFactory;
+    generateId();
+  }
+
+  private void generateId() {
+    id = IdHashes.encode(System.currentTimeMillis() + new Random().nextInt());
+  }
+
+  public String id() {
+    return id;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public NoteInterpreterLoader getNoteReplLoader() {
+    return replLoader;
+  }
+
+  public void setReplLoader(NoteInterpreterLoader replLoader) {
+    this.replLoader = replLoader;
+  }
+
+  public void setZeppelinConfiguration(ZeppelinConfiguration conf) {
+    this.conf = conf;
+  }
+
+  /**
+   * Add a new paragraph to the end of the note.
+   *
+   * @return the newly added paragraph
+   */
+  public Paragraph addParagraph() {
+    Paragraph p = new Paragraph(this, replLoader);
+    synchronized (paragraphs) {
+      paragraphs.add(p);
+    }
+    return p;
+  }
+
+  /**
+   * Insert a new paragraph at the given index.
+   *
+   * @param index position to insert at (0 based)
+   * @return the newly inserted paragraph
+   */
+  public Paragraph insertParagraph(int index) {
+    Paragraph p = new Paragraph(this, replLoader);
+    synchronized (paragraphs) {
+      paragraphs.add(index, p);
+    }
+    return p;
+  }
+
+  /**
+   * Remove a paragraph by id.
+   *
+   * @param paragraphId id of the paragraph to remove
+   * @return the removed paragraph, or null if not found
+   */
+  public Paragraph removeParagraph(String paragraphId) {
+    synchronized (paragraphs) {
+      for (int i = 0; i < paragraphs.size(); i++) {
+        Paragraph p = paragraphs.get(i);
+        if (p.getId().equals(paragraphId)) {
+          paragraphs.remove(i);
+          return p;
+        }
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Move paragraph into the new index (order from 0 ~ n-1).
+   *
+   * @param paragraphId
+   * @param index new index
+   */
+  public void moveParagraph(String paragraphId, int index) {
+    synchronized (paragraphs) {
+      int oldIndex = -1;
+      Paragraph p = null;
+
+      if (index < 0 || index >= paragraphs.size()) {
+        return;
+      }
+
+      for (int i = 0; i < paragraphs.size(); i++) {
+        if (paragraphs.get(i).getId().equals(paragraphId)) {
+          oldIndex = i;
+          if (oldIndex == index) {
+            return;
+          }
+          p = paragraphs.remove(i);
+        }
+      }
+
+      if (p == null) {
+        return;
+      }
+
+      paragraphs.add(index, p);
+    }
+  }
+
+  public boolean isLastParagraph(String paragraphId) {
+    if (!paragraphs.isEmpty()) {
+      synchronized (paragraphs) {
+        if (paragraphId.equals(paragraphs.get(paragraphs.size() - 1).getId())) {
+          return true;
+        }
+      }
+      return false;
+    }
+    // an empty note has no other paragraphs, so treat the given one as the last
+    return true;
+  }
+
+  public Paragraph getParagraph(String paragraphId) {
+    synchronized (paragraphs) {
+      for (Paragraph p : paragraphs) {
+        if (p.getId().equals(paragraphId)) {
+          return p;
+        }
+      }
+    }
+    return null;
+  }
+
+  public Paragraph getLastParagraph() {
+    synchronized (paragraphs) {
+      return paragraphs.get(paragraphs.size() - 1);
+    }
+  }
+
+  /**
+   * Run all paragraphs sequentially.
+   */
+  public void runAll() {
+    synchronized (paragraphs) {
+      for (Paragraph p : paragraphs) {
+        p.setNoteReplLoader(replLoader);
+        p.setListener(jobListenerFactory.getParagraphJobListener(this));
+        Interpreter intp = replLoader.get(p.getRequiredReplName());
+        intp.getScheduler().submit(p);
+      }
+    }
+  }
+
+  /**
+   * Run a single paragraph.
+   *
+   * @param paragraphId
+   */
+  public void run(String paragraphId) {
+    Paragraph p = getParagraph(paragraphId);
+    p.setNoteReplLoader(replLoader);
+    p.setListener(jobListenerFactory.getParagraphJobListener(this));
+    Interpreter intp = replLoader.get(p.getRequiredReplName());
+    if (intp == null) {
+      throw new InterpreterException("Interpreter " + p.getRequiredReplName() + " not found");
+    }
+    intp.getScheduler().submit(p);
+  }
+
+  public List<String> completion(String paragraphId, String buffer, int cursor) {
+    Paragraph p = getParagraph(paragraphId);
+    p.setNoteReplLoader(replLoader);
+    p.setListener(jobListenerFactory.getParagraphJobListener(this));
+    return p.completion(buffer, cursor);
+  }
+
+  public List<Paragraph> getParagraphs() {
+    synchronized (paragraphs) {
+      return new LinkedList<Paragraph>(paragraphs);
+    }
+  }
+
+  public void persist() throws IOException {
+    GsonBuilder gsonBuilder = new GsonBuilder();
+    gsonBuilder.setPrettyPrinting();
+    Gson gson = gsonBuilder.create();
+
+    File dir = new File(conf.getNotebookDir() + "/" + id);
+    if (!dir.exists()) {
+      dir.mkdirs();
+    } else if (dir.isFile()) {
+      throw new RuntimeException("File already exists: " + dir.toString());
+    }
+
+    File file = new File(conf.getNotebookDir() + "/" + id + "/note.json");
+    logger().info("Persist note {} into {}", id, file.getAbsolutePath());
+
+    String json = gson.toJson(this);
+    FileOutputStream out = new FileOutputStream(file);
+    out.write(json.getBytes(conf.getString(ConfVars.ZEPPELIN_ENCODING)));
+    out.close();
+  }
+
+  public void unpersist() throws IOException {
+    File dir = new File(conf.getNotebookDir() + "/" + id);
+
+    FileUtils.deleteDirectory(dir);
+  }
+
+  public static Note load(String id, ZeppelinConfiguration conf, NoteInterpreterLoader replLoader,
+      Scheduler scheduler, JobListenerFactory jobListenerFactory, org.quartz.Scheduler quartzSched)
+      throws IOException {
+    GsonBuilder gsonBuilder = new GsonBuilder();
+    gsonBuilder.setPrettyPrinting();
+    Gson gson = gsonBuilder.create();
+
+    File file = new File(conf.getNotebookDir() + "/" + id + "/note.json");
+    logger().info("Load note {} from {}", id, file.getAbsolutePath());
+
+    if (!file.isFile()) {
+      return null;
+    }
+
+    FileInputStream ins = new FileInputStream(file);
+    String json = IOUtils.toString(ins, conf.getString(ConfVars.ZEPPELIN_ENCODING));
+    ins.close();
+    Note note = gson.fromJson(json, Note.class);
+    note.setZeppelinConfiguration(conf);
+    note.setReplLoader(replLoader);
+    note.jobListenerFactory = jobListenerFactory;
+    for (Paragraph p : note.paragraphs) {
+      if (p.getStatus() == Status.PENDING || p.getStatus() == Status.RUNNING) {
+        p.setStatus(Status.ABORT);
+      }
+    }
+
+    return note;
+  }
+
+  public Map<String, Object> getConfig() {
+    if (config == null) {
+      config = new HashMap<String, Object>();
+    }
+    return config;
+  }
+
+  public void setConfig(Map<String, Object> config) {
+    this.config = config;
+  }
+
+  public Map<String, Object> getInfo() {
+    if (info == null) {
+      info = new HashMap<String, Object>();
+    }
+    return info;
+  }
+
+  public void setInfo(Map<String, Object> info) {
+    this.info = info;
+  }
+
+  @Override
+  public void beforeStatusChange(Job job, Status before, Status after) {
+    Paragraph p = (Paragraph) job;
+  }
+
+  @Override
+  public void afterStatusChange(Job job, Status before, Status after) {
+    Paragraph p = (Paragraph) job;
+  }
+
+  private static Logger logger() {
+    Logger logger = LoggerFactory.getLogger(Note.class);
+    return logger;
+  }
+
+  @Override
+  public void onProgressUpdate(Job job, int progress) {}
+
+}
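
A short sketch of the paragraph lifecycle exposed above, assuming `note` was obtained from Notebook.createNote() and "%spark" is just an example repl prefix; IOException handling omitted:

    Paragraph p = note.addParagraph();
    p.setText("%spark\nsc.version");   // "%<repl>" selects the interpreter, the rest is the script
    note.persist();                    // writes <notebookDir>/<noteId>/note.json
    note.run(p.getId());               // submits the paragraph to its interpreter's scheduler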

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteInterpreterLoader.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteInterpreterLoader.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteInterpreterLoader.java
new file mode 100644
index 0000000..b1fd7b9
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteInterpreterLoader.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.notebook;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterFactory;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterSetting;
+
+/**
+ * Repl loader per note.
+ */
+public class NoteInterpreterLoader {
+  private transient InterpreterFactory factory;
+  String noteId;
+
+  public NoteInterpreterLoader(InterpreterFactory factory) {
+    this.factory = factory;
+  }
+
+  public void setNoteId(String noteId) {
+    this.noteId = noteId;
+  }
+
+  /**
+   * Bind interpreter settings to this note.
+   *
+   * @param ids InterpreterSetting id list
+   * @throws IOException
+   */
+  public void setInterpreters(List<String> ids) throws IOException {
+    factory.putNoteInterpreterSettingBinding(noteId, ids);
+  }
+
+  public List<String> getInterpreters() {
+    return factory.getNoteInterpreterSettingBinding(noteId);
+  }
+
+  public List<InterpreterSetting> getInterpreterSettings() {
+    List<String> interpreterSettingIds = factory.getNoteInterpreterSettingBinding(noteId);
+    LinkedList<InterpreterSetting> settings = new LinkedList<InterpreterSetting>();
+    synchronized (interpreterSettingIds) {
+      Iterator<String> it = interpreterSettingIds.iterator();
+      while (it.hasNext()) {
+        String id = it.next();
+        InterpreterSetting setting = factory.get(id);
+        if (setting == null) {
+          // interpreter setting was removed from the factory; drop the stale id here, too
+          it.remove();
+        } else {
+          settings.add(setting);
+        }
+      }
+    }
+    return settings;
+  }
+
+  public Interpreter get(String replName) {
+    List<InterpreterSetting> settings = getInterpreterSettings();
+
+    if (settings == null || settings.size() == 0) {
+      return null;
+    }
+
+    if (replName == null) {
+      return settings.get(0).getInterpreterGroup().getFirst();
+    }
+
+    if (Interpreter.registeredInterpreters == null) {
+      return null;
+    }
+    Interpreter.RegisteredInterpreter registeredInterpreter
+      = Interpreter.registeredInterpreters.get(replName);
+    if (registeredInterpreter == null || registeredInterpreter.getClassName() == null) {
+      throw new InterpreterException(replName + " interpreter not found");
+    }
+    String interpreterClassName = registeredInterpreter.getClassName();
+
+    for (InterpreterSetting setting : settings) {
+      InterpreterGroup intpGroup = setting.getInterpreterGroup();
+      for (Interpreter interpreter : intpGroup) {
+        if (interpreterClassName.equals(interpreter.getClassName())) {
+          return interpreter;
+        }
+      }
+    }
+
+    return null;
+  }
+}
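
A sketch of how a note resolves its repl, assuming `factory` is a constructed InterpreterFactory with a registered "spark" interpreter and `note` is an existing note; exceptions omitted:

    NoteInterpreterLoader loader = new NoteInterpreterLoader(factory);
    loader.setNoteId(note.id());
    loader.setInterpreters(factory.getDefaultInterpreterSettingList());

    Interpreter spark = loader.get("spark");   // resolved via Interpreter.registeredInterpreters
    Interpreter first = loader.get(null);      // null falls back to the first bound setting's first interpreter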

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Notebook.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Notebook.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Notebook.java
new file mode 100644
index 0000000..2d9ba36
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Notebook.java
@@ -0,0 +1,299 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.notebook;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.zeppelin.conf.ZeppelinConfiguration;
+import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
+import org.apache.zeppelin.interpreter.InterpreterFactory;
+import org.apache.zeppelin.interpreter.InterpreterSetting;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.quartz.CronScheduleBuilder;
+import org.quartz.CronTrigger;
+import org.quartz.JobBuilder;
+import org.quartz.JobDetail;
+import org.quartz.JobExecutionContext;
+import org.quartz.JobExecutionException;
+import org.quartz.JobKey;
+import org.quartz.SchedulerException;
+import org.quartz.TriggerBuilder;
+import org.quartz.impl.StdSchedulerFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Collection of Notes.
+ */
+public class Notebook {
+  Logger logger = LoggerFactory.getLogger(Notebook.class);
+  private SchedulerFactory schedulerFactory;
+  private InterpreterFactory replFactory;
+  /** Keep the order. */
+  Map<String, Note> notes = new LinkedHashMap<String, Note>();
+  private ZeppelinConfiguration conf;
+  private StdSchedulerFactory quartzSchedFact;
+  private org.quartz.Scheduler quartzSched;
+  private JobListenerFactory jobListenerFactory;
+
+  public Notebook(ZeppelinConfiguration conf, SchedulerFactory schedulerFactory,
+      InterpreterFactory replFactory, JobListenerFactory jobListenerFactory) throws IOException,
+      SchedulerException {
+    this.conf = conf;
+    this.schedulerFactory = schedulerFactory;
+    this.replFactory = replFactory;
+    this.jobListenerFactory = jobListenerFactory;
+    quartzSchedFact = new org.quartz.impl.StdSchedulerFactory();
+    quartzSched = quartzSchedFact.getScheduler();
+    quartzSched.start();
+    CronJob.notebook = this;
+
+    loadAllNotes();
+  }
+
+  /**
+   * Create a new note, binding the default interpreter settings when
+   * automatic interpreter binding is enabled.
+   *
+   * @return the newly created note
+   * @throws IOException
+   */
+  public Note createNote() throws IOException {
+    if (conf.getBoolean(ConfVars.ZEPPELIN_NOTEBOOK_AUTO_INTERPRETER_BINDING)) {
+      return createNote(replFactory.getDefaultInterpreterSettingList());
+    } else {
+      return createNote(null);
+    }
+  }
+
+  /**
+   * Create a new note and bind the given interpreter settings to it.
+   *
+   * @param interpreterIds interpreter setting ids to bind, or null to bind none
+   * @return the newly created note
+   * @throws IOException
+   */
+  public Note createNote(List<String> interpreterIds) throws IOException {
+    NoteInterpreterLoader intpLoader = new NoteInterpreterLoader(replFactory);
+    Note note = new Note(conf, intpLoader, jobListenerFactory, quartzSched);
+    intpLoader.setNoteId(note.id());
+    synchronized (notes) {
+      notes.put(note.id(), note);
+    }
+    if (interpreterIds != null) {
+      bindInterpretersToNote(note.id(), interpreterIds);
+    }
+
+    return note;
+  }
+
+  public void bindInterpretersToNote(String id,
+      List<String> interpreterSettingIds) throws IOException {
+    Note note = getNote(id);
+    if (note != null) {
+      note.getNoteReplLoader().setInterpreters(interpreterSettingIds);
+      replFactory.putNoteInterpreterSettingBinding(id, interpreterSettingIds);
+    }
+  }
+
+  public List<String> getBindedInterpreterSettingsIds(String id) {
+    Note note = getNote(id);
+    if (note != null) {
+      return note.getNoteReplLoader().getInterpreters();
+    } else {
+      return new LinkedList<String>();
+    }
+  }
+
+  public List<InterpreterSetting> getBindedInterpreterSettings(String id) {
+    Note note = getNote(id);
+    if (note != null) {
+      return note.getNoteReplLoader().getInterpreterSettings();
+    } else {
+      return new LinkedList<InterpreterSetting>();
+    }
+  }
+
+  public Note getNote(String id) {
+    synchronized (notes) {
+      return notes.get(id);
+    }
+  }
+
+  public void removeNote(String id) {
+    Note note;
+    synchronized (notes) {
+      note = notes.remove(id);
+    }
+    if (note == null) {
+      return;
+    }
+    try {
+      note.unpersist();
+    } catch (IOException e) {
+      logger.error("Can't remove note " + id, e);
+    }
+  }
+
+  private void loadAllNotes() throws IOException {
+    File notebookDir = new File(conf.getNotebookDir());
+    File[] dirs = notebookDir.listFiles();
+    if (dirs == null) {
+      return;
+    }
+    for (File f : dirs) {
+      boolean isHidden = f.getName().startsWith(".");
+      if (f.isDirectory() && !isHidden) {
+        Scheduler scheduler =
+            schedulerFactory.createOrGetFIFOScheduler("note_" + System.currentTimeMillis());
+        logger.info("Loading note from " + f.getName());
+        NoteInterpreterLoader noteInterpreterLoader = new NoteInterpreterLoader(replFactory);
+        Note note = Note.load(f.getName(),
+            conf,
+            noteInterpreterLoader,
+            scheduler,
+            jobListenerFactory, quartzSched);
+        noteInterpreterLoader.setNoteId(note.id());
+
+        synchronized (notes) {
+          notes.put(note.id(), note);
+          refreshCron(note.id());
+        }
+      }
+    }
+  }
+
+  public List<Note> getAllNotes() {
+    synchronized (notes) {
+      List<Note> noteList = new ArrayList<Note>(notes.values());
+      logger.info("{} notes found", noteList.size());
+      Collections.sort(noteList, new Comparator<Note>() {
+        @Override
+        public int compare(Note note1, Note note2) {
+          String name1 = note1.id();
+          if (note1.getName() != null) {
+            name1 = note1.getName();
+          }
+          String name2 = note2.id();
+          if (note2.getName() != null) {
+            name2 = note2.getName();
+          }
+          return name1.compareTo(name2);
+        }
+      });
+      return noteList;
+    }
+  }
+
+  public JobListenerFactory getJobListenerFactory() {
+    return jobListenerFactory;
+  }
+
+  public void setJobListenerFactory(JobListenerFactory jobListenerFactory) {
+    this.jobListenerFactory = jobListenerFactory;
+  }
+
+  /**
+   * Cron task for the note.
+   *
+   * @author Leemoonsoo
+   *
+   */
+  public static class CronJob implements org.quartz.Job {
+    public static Notebook notebook;
+
+    @Override
+    public void execute(JobExecutionContext context) throws JobExecutionException {
+
+      String noteId = context.getJobDetail().getJobDataMap().getString("noteId");
+      Note note = notebook.getNote(noteId);
+      note.runAll();
+    }
+  }
+
+  public void refreshCron(String id) {
+    removeCron(id);
+    synchronized (notes) {
+
+      Note note = notes.get(id);
+      if (note == null) {
+        return;
+      }
+      Map<String, Object> config = note.getConfig();
+      if (config == null) {
+        return;
+      }
+
+      String cronExpr = (String) note.getConfig().get("cron");
+      if (cronExpr == null || cronExpr.trim().length() == 0) {
+        return;
+      }
+
+
+      JobDetail newJob =
+          JobBuilder.newJob(CronJob.class).withIdentity(id, "note").usingJobData("noteId", id)
+          .build();
+
+      Map<String, Object> info = note.getInfo();
+      info.put("cron", null);
+
+      CronTrigger trigger = null;
+      try {
+        trigger =
+            TriggerBuilder.newTrigger().withIdentity("trigger_" + id, "note")
+            .withSchedule(CronScheduleBuilder.cronSchedule(cronExpr)).forJob(id, "note")
+            .build();
+      } catch (Exception e) {
+        logger.error("Error", e);
+        info.put("cron", e.getMessage());
+      }
+
+
+      try {
+        if (trigger != null) {
+          quartzSched.scheduleJob(newJob, trigger);
+        }
+      } catch (SchedulerException e) {
+        logger.error("Error", e);
+        info.put("cron", "Scheduler Exception");
+      }
+    }
+  }
+
+  private void removeCron(String id) {
+    try {
+      quartzSched.deleteJob(new JobKey(id, "note"));
+    } catch (SchedulerException e) {
+      logger.error("Can't remove quartz job " + id, e);
+    }
+  }
+
+  public InterpreterFactory getInterpreterFactory() {
+    return replFactory;
+  }
+
+
+}
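
A sketch of the cron flow implemented by refreshCron() above, assuming `notebook` is a constructed Notebook and `noteId` refers to an existing note; the expression is just an example in Quartz syntax:

    Note note = notebook.getNote(noteId);
    note.getConfig().put("cron", "0 0/5 * * * ?");   // run every five minutes
    notebook.refreshCron(note.id());                 // (re)registers a CronJob trigger with Quartz
    // scheduling problems are reported back through note.getInfo().get("cron")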

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Paragraph.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Paragraph.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Paragraph.java
new file mode 100644
index 0000000..e0986bf
--- /dev/null
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Paragraph.java
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.notebook;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.display.Input;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.Interpreter.FormType;
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.JobListener;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Paragraph is a representation of an execution unit.
+ *
+ * @author Leemoonsoo
+ */
+public class Paragraph extends Job implements Serializable {
+  private static final long serialVersionUID = -6328572073497992016L;
+  private transient NoteInterpreterLoader replLoader;
+
+  String title;
+  String text;
+  private Map<String, Object> config; // paragraph configs like isOpen, colWidth, etc
+  public final GUI settings;          // form and parameter settings
+
+  public Paragraph(JobListener listener, NoteInterpreterLoader replLoader) {
+    super(generateId(), listener);
+    this.replLoader = replLoader;
+    title = null;
+    text = null;
+    settings = new GUI();
+    config = new HashMap<String, Object>();
+  }
+
+  private static String generateId() {
+    return "paragraph_" + System.currentTimeMillis() + "_"
+           + new Random(System.currentTimeMillis()).nextInt();
+  }
+
+  public String getText() {
+    return text;
+  }
+
+  public void setText(String newText) {
+    this.text = newText;
+  }
+
+
+  public String getTitle() {
+    return title;
+  }
+
+  public void setTitle(String title) {
+    this.title = title;
+  }
+
+  public String getRequiredReplName() {
+    return getRequiredReplName(text);
+  }
+
+  public static String getRequiredReplName(String text) {
+    if (text == null) {
+      return null;
+    }
+
+    // get script head
+    int scriptHeadIndex = 0;
+    for (int i = 0; i < text.length(); i++) {
+      char ch = text.charAt(i);
+      if (ch == ' ' || ch == '\n') {
+        scriptHeadIndex = i;
+        break;
+      }
+    }
+    if (scriptHeadIndex == 0) {
+      return null;
+    }
+    String head = text.substring(0, scriptHeadIndex);
+    if (head.startsWith("%")) {
+      return head.substring(1);
+    } else {
+      return null;
+    }
+  }
+
+  private String getScriptBody() {
+    return getScriptBody(text);
+  }
+
+  public static String getScriptBody(String text) {
+    if (text == null) {
+      return null;
+    }
+
+    String magic = getRequiredReplName(text);
+    if (magic == null) {
+      return text;
+    }
+    if (magic.length() + 2 >= text.length()) {
+      return "";
+    }
+    return text.substring(magic.length() + 2);
+  }
+
+  public NoteInterpreterLoader getNoteReplLoader() {
+    return replLoader;
+  }
+
+  public Interpreter getRepl(String name) {
+    return replLoader.get(name);
+  }
+
+  public List<String> completion(String buffer, int cursor) {
+    String replName = getRequiredReplName(buffer);
+    if (replName != null) {
+      cursor -= replName.length() + 1;
+    }
+    String body = getScriptBody(buffer);
+    Interpreter repl = getRepl(replName);
+    if (repl == null) {
+      return null;
+    }
+
+    return repl.completion(body, cursor);
+  }
+
+  public void setNoteReplLoader(NoteInterpreterLoader repls) {
+    this.replLoader = repls;
+  }
+
+  public InterpreterResult getResult() {
+    return (InterpreterResult) getReturn();
+  }
+
+  @Override
+  public int progress() {
+    String replName = getRequiredReplName();
+    Interpreter repl = getRepl(replName);
+    if (repl != null) {
+      return repl.getProgress(getInterpreterContext());
+    } else {
+      return 0;
+    }
+  }
+
+  @Override
+  public Map<String, Object> info() {
+    return null;
+  }
+
+  @Override
+  protected Object jobRun() throws Throwable {
+    String replName = getRequiredReplName();
+    Interpreter repl = getRepl(replName);
+    logger().info("Run paragraph {} using {}", getId(), replName);
+    if (repl == null) {
+      logger().error("Can not find interpreter name " + replName);
+      throw new RuntimeException("Can not find interpreter for " + getRequiredReplName());
+    }
+
+    String script = getScriptBody();
+    // inject form
+    if (repl.getFormType() == FormType.NATIVE) {
+      settings.clear();
+    } else if (repl.getFormType() == FormType.SIMPLE) {
+      String scriptBody = getScriptBody();
+      Map<String, Input> inputs = Input.extractSimpleQueryParam(scriptBody); // inputs will be built
+                                                                             // from script body
+      settings.setForms(inputs);
+      script = Input.getSimpleQuery(settings.getParams(), scriptBody);
+    }
+    logger().info("RUN : " + script);
+    InterpreterResult ret = repl.interpret(script, getInterpreterContext());
+    return ret;
+  }
+
+  @Override
+  protected boolean jobAbort() {
+    Interpreter repl = getRepl(getRequiredReplName());
+    if (repl == null) {
+      // nothing to cancel if the interpreter cannot be resolved
+      return true;
+    }
+    repl.cancel(getInterpreterContext());
+    return true;
+  }
+
+  private InterpreterContext getInterpreterContext() {
+    InterpreterContext interpreterContext = new InterpreterContext(getId(),
+            this.getTitle(),
+            this.getText(),
+            this.getConfig(),
+            this.settings);
+    return interpreterContext;
+  }
+
+  private Logger logger() {
+    Logger logger = LoggerFactory.getLogger(Paragraph.class);
+    return logger;
+  }
+
+
+  public Map<String, Object> getConfig() {
+    return config;
+  }
+
+  public void setConfig(Map<String, Object> config) {
+    this.config = config;
+  }
+
+  public void setReturn(InterpreterResult value, Throwable t) {
+    setResult(value);
+    setException(t);
+  }
+}
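
The "%repl" convention parsed by getRequiredReplName()/getScriptBody() above, illustrated with a hypothetical "md" repl:

    Paragraph.getRequiredReplName("%md\n# hello");   // "md"
    Paragraph.getScriptBody("%md\n# hello");         // "# hello"
    Paragraph.getRequiredReplName("1 + 1");          // null: no magic, the whole text is the script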


http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/pom.xml b/zeppelin-interpreter/pom.xml
index de4bbc8..d824dfe 100644
--- a/zeppelin-interpreter/pom.xml
+++ b/zeppelin-interpreter/pom.xml
@@ -1,15 +1,32 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
 
-  <groupId>com.nflabs.zeppelin</groupId>
+  <groupId>org.apache.zeppelin</groupId>
   <artifactId>zeppelin-interpreter</artifactId>
   <packaging>jar</packaging>
   <version>0.5.0-SNAPSHOT</version>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/display/GUI.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/display/GUI.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/display/GUI.java
deleted file mode 100644
index 51ae222..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/display/GUI.java
+++ /dev/null
@@ -1,68 +0,0 @@
-package com.nflabs.zeppelin.display;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.TreeMap;
-
-import com.nflabs.zeppelin.display.Input.ParamOption;
-
-/**
- * Settings of a form.
- *
- * @author Leemoonsoo
- *
- */
-public class GUI implements Serializable {
-
-  Map<String, Object> params = new HashMap<String, Object>(); // form parameters from client
-  Map<String, Input> forms = new TreeMap<String, Input>(); // form configuration
-
-  public GUI() {
-
-  }
-
-  public void setParams(Map<String, Object> values) {
-    this.params = values;
-  }
-
-  public Map<String, Object> getParams() {
-    return params;
-  }
-
-  public Map<String, Input> getForms() {
-    return forms;
-  }
-
-  public void setForms(Map<String, Input> forms) {
-    this.forms = forms;
-  }
-
-  public Object input(String id, Object defaultValue) {
-    // first find values from client and then use default
-    Object value = params.get(id);
-    if (value == null) {
-      value = defaultValue;
-    }
-
-    forms.put(id, new Input(id, defaultValue));
-    return value;
-  }
-
-  public Object input(String id) {
-    return input(id, "");
-  }
-
-  public Object select(String id, Object defaultValue, ParamOption[] options) {
-    Object value = params.get(id);
-    if (value == null) {
-      value = defaultValue;
-    }
-    forms.put(id, new Input(id, defaultValue, options));
-    return value;
-  }
-
-  public void clear() {
-    this.forms = new TreeMap<String, Input>();
-  }
-}
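
To make the form API above concrete, a small usage sketch (not part of the patch; ids, defaults and option labels are invented, and the same imports as GUI.java above are assumed):

  GUI gui = new GUI();

  // returns "world" until the client submits a value for "name",
  // and registers a text form with that id
  Object name = gui.input("name", "world");

  // drop-down backed by ParamOption entries; returns the default "1" until the client picks one
  ParamOption[] options = new ParamOption[] {
      new ParamOption("1", "one"), new ParamOption("2", "two")};
  Object choice = gui.select("count", "1", options);

  // form definitions accumulated so far, keyed by form id
  Map<String, Input> forms = gui.getForms();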

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/display/Input.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/display/Input.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/display/Input.java
deleted file mode 100644
index 54ef717..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/display/Input.java
+++ /dev/null
@@ -1,458 +0,0 @@
-package com.nflabs.zeppelin.display;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * Input type.
- * 
- * @author Leemoonsoo
- *
- */
-public class Input implements Serializable {
-  /**
-   * Parameter option for a select form.
-   * 
-   * @author Leemoonsoo
-   *
-   */
-  public static class ParamOption {
-    Object value;
-    String displayName;
-
-    public ParamOption(Object value, String displayName) {
-      super();
-      this.value = value;
-      this.displayName = displayName;
-    }
-
-    public Object getValue() {
-      return value;
-    }
-
-    public void setValue(Object value) {
-      this.value = value;
-    }
-
-    public String getDisplayName() {
-      return displayName;
-    }
-
-    public void setDisplayName(String displayName) {
-      this.displayName = displayName;
-    }
-
-  }
-
-  String name;
-  String displayName;
-  String type;
-  Object defaultValue;
-  ParamOption[] options;
-  boolean hidden;
-
-  public Input(String name, Object defaultValue) {
-    this.name = name;
-    this.displayName = name;
-    this.defaultValue = defaultValue;
-  }
-
-  public Input(String name, Object defaultValue, ParamOption[] options) {
-    this.name = name;
-    this.displayName = name;
-    this.defaultValue = defaultValue;
-    this.options = options;
-  }
-
-
-  public Input(String name, String displayName, String type, Object defaultValue,
-      ParamOption[] options, boolean hidden) {
-    super();
-    this.name = name;
-    this.displayName = displayName;
-    this.type = type;
-    this.defaultValue = defaultValue;
-    this.options = options;
-    this.hidden = hidden;
-  }
-
-  public boolean equals(Object o) {
-    return name.equals(((Input) o).getName());
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public String getDisplayName() {
-    return displayName;
-  }
-
-  public void setDisplayName(String displayName) {
-    this.displayName = displayName;
-  }
-
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
-
-  public Object getDefaultValue() {
-    return defaultValue;
-  }
-
-  public void setDefaultValue(Object defaultValue) {
-    this.defaultValue = defaultValue;
-  }
-
-  public ParamOption[] getOptions() {
-    return options;
-  }
-
-  public void setOptions(ParamOption[] options) {
-    this.options = options;
-  }
-
-  public boolean isHidden() {
-    return hidden;
-  }
-
-
-  private static String[] getNameAndDisplayName(String str) {
-    Pattern p = Pattern.compile("([^(]*)\\s*[(]([^)]*)[)]");
-    Matcher m = p.matcher(str.trim());
-    if (m == null || m.find() == false) {
-      return null;
-    }
-    String[] ret = new String[2];
-    ret[0] = m.group(1);
-    ret[1] = m.group(2);
-    return ret;
-  }
-
-  private static String[] getType(String str) {
-    Pattern p = Pattern.compile("([^:]*)\\s*:\\s*(.*)");
-    Matcher m = p.matcher(str.trim());
-    if (m == null || m.find() == false) {
-      return null;
-    }
-    String[] ret = new String[2];
-    ret[0] = m.group(1).trim();
-    ret[1] = m.group(2).trim();
-    return ret;
-  }
-
-  public static Map<String, Input> extractSimpleQueryParam(String script) {
-    Map<String, Input> params = new HashMap<String, Input>();
-    if (script == null) {
-      return params;
-    }
-    String replaced = script;
-
-    Pattern pattern = Pattern.compile("([_])?[$][{]([^=}]*([=][^}]*)?)[}]");
-
-    Matcher match = pattern.matcher(replaced);
-    while (match.find()) {
-      String hiddenPart = match.group(1);
-      boolean hidden = false;
-      if ("_".equals(hiddenPart)) {
-        hidden = true;
-      }
-      String m = match.group(2);
-
-      String namePart;
-      String valuePart;
-
-      int p = m.indexOf('=');
-      if (p > 0) {
-        namePart = m.substring(0, p);
-        valuePart = m.substring(p + 1);
-      } else {
-        namePart = m;
-        valuePart = null;
-      }
-
-
-      String varName;
-      String displayName = null;
-      String type = null;
-      String defaultValue = "";
-      ParamOption[] paramOptions = null;
-
-      // get var name type
-      String varNamePart;
-      String[] typeArray = getType(namePart);
-      if (typeArray != null) {
-        type = typeArray[0];
-        varNamePart = typeArray[1];
-      } else {
-        varNamePart = namePart;
-      }
-
-      // get var name and displayname
-      String[] varNameArray = getNameAndDisplayName(varNamePart);
-      if (varNameArray != null) {
-        varName = varNameArray[0];
-        displayName = varNameArray[1];
-      } else {
-        varName = varNamePart.trim();
-      }
-
-      // get defaultValue
-      if (valuePart != null) {
-        // find default value
-        int optionP = valuePart.indexOf(",");
-        if (optionP > 0) { // option available
-          defaultValue = valuePart.substring(0, optionP);
-          String optionPart = valuePart.substring(optionP + 1);
-          String[] options = Input.splitPipe(optionPart);
-
-          paramOptions = new ParamOption[options.length];
-
-          for (int i = 0; i < options.length; i++) {
-
-            String[] optNameArray = getNameAndDisplayName(options[i]);
-            if (optNameArray != null) {
-              paramOptions[i] = new ParamOption(optNameArray[0], optNameArray[1]);
-            } else {
-              paramOptions[i] = new ParamOption(options[i], null);
-            }
-          }
-
-
-        } else { // no option
-          defaultValue = valuePart;
-        }
-
-      }
-
-      Input param = new Input(varName, displayName, type, defaultValue, paramOptions, hidden);
-      params.put(varName, param);
-    }
-
-    params.remove("pql");
-    return params;
-  }
-
-  public static String getSimpleQuery(Map<String, Object> params, String script) {
-    String replaced = script;
-
-    for (String key : params.keySet()) {
-      Object value = params.get(key);
-      replaced =
-          replaced.replaceAll("[_]?[$][{]([^:]*[:])?" + key + "([(][^)]*[)])?(=[^}]*)?[}]",
-                              value.toString());
-    }
-
-    Pattern pattern = Pattern.compile("[$][{]([^=}]*[=][^}]*)[}]");
-    while (true) {
-      Matcher match = pattern.matcher(replaced);
-      if (match != null && match.find()) {
-        String m = match.group(1);
-        int p = m.indexOf('=');
-        String replacement = m.substring(p + 1);
-        int optionP = replacement.indexOf(",");
-        if (optionP > 0) {
-          replacement = replacement.substring(0, optionP);
-        }
-        replaced =
-            replaced.replaceFirst("[_]?[$][{]"
-                + m.replaceAll("[(]", ".").replaceAll("[)]", ".").replaceAll("[|]", ".") + "[}]",
-                replacement);
-      } else {
-        break;
-      }
-    }
-
-    replaced = replaced.replaceAll("[_]?[$][{]([^=}]*)[}]", "");
-    return replaced;
-  }
-
-
-  public static String[] split(String str) {
-    return str.split(";(?=([^\"']*\"[^\"']*\")*[^\"']*$)");
-
-  }
-
-  /*
-   * public static String [] splitPipe(String str){ //return
-   * str.split("\\|(?=([^\"']*\"[^\"']*\")*[^\"']*$)"); return
-   * str.split("\\|(?=([^\"']*\"[^\"']*\")*[^\"']*$)"); }
-   */
-
-
-  public static String[] splitPipe(String str) {
-    return split(str, '|');
-  }
-
-  public static String[] split(String str, char split) {
-    return split(str, new String[] {String.valueOf(split)}, false);
-  }
-
-  public static String[] split(String str, String[] splitters, boolean includeSplitter) {
-    String escapeSeq = "\"',;${}";
-    char escapeChar = '\\';
-
-    String[] blockStart = new String[] {"\"", "'", "${", "N_(", "N_<"};
-    String[] blockEnd = new String[] {"\"", "'", "}", "N_)", "N_>"};
-
-    return split(str, escapeSeq, escapeChar, blockStart, blockEnd, splitters, includeSplitter);
-
-  }
-
-  public static String[] split(String str, String escapeSeq, char escapeChar, String[] blockStart,
-      String[] blockEnd, String[] splitters, boolean includeSplitter) {
-
-    List<String> splits = new ArrayList<String>();
-
-    String curString = "";
-
-    boolean escape = false; // true when escape char is found
-    int lastEscapeOffset = -1;
-    int blockStartPos = -1;
-    List<Integer> blockStack = new LinkedList<Integer>();
-
-    for (int i = 0; i < str.length(); i++) {
-      char c = str.charAt(i);
-
-      // escape char detected
-      if (c == escapeChar && escape == false) {
-        escape = true;
-        continue;
-      }
-
-      // escaped char comes
-      if (escape == true) {
-        if (escapeSeq.indexOf(c) < 0) {
-          curString += escapeChar;
-        }
-        curString += c;
-        escape = false;
-        lastEscapeOffset = curString.length();
-        continue;
-      }
-
-      if (blockStack.size() > 0) { // inside of block
-        curString += c;
-        // check multichar block
-        boolean multicharBlockDetected = false;
-        for (int b = 0; b < blockStart.length; b++) {
-          if (blockStartPos >= 0
-              && getBlockStr(blockStart[b]).compareTo(str.substring(blockStartPos, i)) == 0) {
-            blockStack.remove(0);
-            blockStack.add(0, b);
-            multicharBlockDetected = true;
-            break;
-          }
-        }
-        
-        if (multicharBlockDetected == true) {
-          continue;
-        }
-
-        // check if current block is nestable
-        if (isNestedBlock(blockStart[blockStack.get(0)]) == true) {
-          // try to find nested block start
-
-          if (curString.substring(lastEscapeOffset + 1).endsWith(
-              getBlockStr(blockStart[blockStack.get(0)])) == true) {
-            blockStack.add(0, blockStack.get(0)); // block is started
-            blockStartPos = i;
-            continue;
-          }
-        }
-
-        // check if block is finishing
-        if (curString.substring(lastEscapeOffset + 1).endsWith(
-            getBlockStr(blockEnd[blockStack.get(0)]))) {
-          // the block closer is one of the splitters (and not nested block)
-          if (isNestedBlock(blockEnd[blockStack.get(0)]) == false) {
-            for (String splitter : splitters) {
-              if (splitter.compareTo(getBlockStr(blockEnd[blockStack.get(0)])) == 0) {
-                splits.add(curString);
-                if (includeSplitter == true) {
-                  splits.add(splitter);
-                }
-                curString = "";
-                lastEscapeOffset = -1;
-
-                break;
-              }
-            }
-          }
-          blockStartPos = -1;
-          blockStack.remove(0);
-          continue;
-        }
-
-      } else { // not in the block
-        boolean splitted = false;
-        for (String splitter : splitters) {
-          // forward check for splitter
-          int currentLength = i + splitter.length();
-          if (splitter.compareTo(str.substring(i, Math.min(currentLength, str.length()))) == 0) {
-            splits.add(curString);
-            if (includeSplitter == true) {
-              splits.add(splitter);
-            }
-            curString = "";
-            lastEscapeOffset = -1;
-            i += splitter.length() - 1;
-            splitted = true;
-            break;
-          }
-        }
-        if (splitted == true) {
-          continue;
-        }
-
-        // add char to current string
-        curString += c;
-
-        // check if block is started
-        for (int b = 0; b < blockStart.length; b++) {
-          if (curString.substring(lastEscapeOffset + 1)
-                       .endsWith(getBlockStr(blockStart[b])) == true) {
-            blockStack.add(0, b); // block is started
-            blockStartPos = i;
-            break;
-          }
-        }
-      }
-    }
-    if (curString.length() > 0) {
-      splits.add(curString.trim());
-    }
-    return splits.toArray(new String[] {});
-
-  }
-
-  private static String getBlockStr(String blockDef) {
-    if (blockDef.startsWith("N_")) {
-      return blockDef.substring("N_".length());
-    } else {
-      return blockDef;
-    }
-  }
-
-  private static boolean isNestedBlock(String blockDef) {
-    if (blockDef.startsWith("N_")) {
-      return true;
-    } else {
-      return false;
-    }
-  }
-}
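
A sketch of how the two static helpers above are typically driven (not part of the patch; the script text and values are made up, and the usual java.util imports are assumed):

  String script = "select * from log where age > ${maxAge=30} limit ${limit=10}";

  // one Input per ${...} placeholder, keyed by variable name ("maxAge", "limit")
  Map<String, Input> forms = Input.extractSimpleQueryParam(script);

  // substitute submitted values back into the script
  Map<String, Object> params = new HashMap<String, Object>();
  params.put("maxAge", "40");
  params.put("limit", "10");
  String rendered = Input.getSimpleQuery(params, script);
  // -> "select * from log where age > 40 limit 10"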

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/ClassloaderInterpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/ClassloaderInterpreter.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/ClassloaderInterpreter.java
deleted file mode 100644
index f8d8bbf..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/ClassloaderInterpreter.java
+++ /dev/null
@@ -1,261 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.net.URL;
-import java.util.List;
-import java.util.Properties;
-
-import com.nflabs.zeppelin.scheduler.Scheduler;
-
-/**
- * Interpreter wrapper that runs the wrapped interpreter with the given classloader
- * installed as the thread context classloader.
- *
- */
-public class ClassloaderInterpreter
-    extends Interpreter
-    implements WrappedInterpreter {
-
-  private ClassLoader cl;
-  private Interpreter intp;
-
-  public ClassloaderInterpreter(Interpreter intp, ClassLoader cl) {
-    super(new Properties());
-    this.cl = cl;
-    this.intp = intp;
-  }
-
-  @Override
-  public Interpreter getInnerInterpreter() {
-    return intp;
-  }
-
-  public ClassLoader getClassloader() {
-    return cl;
-  }
-
-  @Override
-  public InterpreterResult interpret(String st, InterpreterContext context) {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.interpret(st, context);
-    } catch (Exception e) {
-      e.printStackTrace();
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-
-  @Override
-  public void open() {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      intp.open();
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public void close() {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      intp.close();
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      intp.cancel(context);
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public FormType getFormType() {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.getFormType();
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.getProgress(context);
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.getScheduler();
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.completion(buf, cursor);
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-
-  @Override
-  public String getClassName() {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.getClassName();
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      intp.setInterpreterGroup(interpreterGroup);
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public InterpreterGroup getInterpreterGroup() {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.getInterpreterGroup();
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public void setClassloaderUrls(URL [] urls) {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      intp.setClassloaderUrls(urls);
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public URL [] getClassloaderUrls() {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.getClassloaderUrls();
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public void setProperty(Properties property) {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      intp.setProperty(property);
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public Properties getProperty() {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.getProperty();
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-  @Override
-  public String getProperty(String key) {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(cl);
-    try {
-      return intp.getProperty(key);
-    } catch (Exception e) {
-      throw new InterpreterException(e);
-    } finally {
-      cl = Thread.currentThread().getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-}
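
A hedged sketch of how this wrapper is meant to be wired up (the class name, jar URLs and the reflection plumbing below are invented for illustration): the wrapper makes every delegated call run with the interpreter's own classloader as the thread context classloader.

  // load an interpreter implementation from its own URLClassLoader ...
  URLClassLoader pluginCl = new URLClassLoader(jarUrls, Interpreter.class.getClassLoader());
  Class<?> cls = pluginCl.loadClass("com.example.MyInterpreter");   // hypothetical class
  Interpreter inner = (Interpreter) cls.getConstructor(Properties.class)
      .newInstance(new Properties());

  // ... and wrap it so open()/interpret()/close() all run under pluginCl
  Interpreter wrapped = new ClassloaderInterpreter(inner, pluginCl);
  wrapped.open();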

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/Interpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/Interpreter.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/Interpreter.java
deleted file mode 100644
index acb62a2..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/Interpreter.java
+++ /dev/null
@@ -1,267 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-
-import java.net.URL;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-/**
- * Interface for interpreters.
- * If you want to implement a new Zeppelin interpreter, extend this class.
- *
- * Please see,
- * http://zeppelin.incubator.apache.org/docs/development/writingzeppelininterpreter.html
- *
- * open(), close() and interpret() are the three most important methods you need to implement.
- * cancel(), getProgress() and completion() are good to have.
- * getFormType() and getScheduler() determine Zeppelin's behavior.
- *
- */
-public abstract class Interpreter {
-
-  /**
-   * Opens the interpreter. You may want to place your initialization routine here.
-   * open() is called only once.
-   */
-  public abstract void open();
-
-  /**
-   * Closes the interpreter. You may want to free your resources here.
-   * close() is called only once.
-   */
-  public abstract void close();
-
-  /**
-   * Runs code and returns the result synchronously.
-   *
-   * @param st statements to run
-   * @param context
-   * @return
-   */
-  public abstract InterpreterResult interpret(String st, InterpreterContext context);
-
-  /**
-   * Optionally implement the cancel routine to abort a running interpret() call.
-   *
-   * @param context
-   */
-  public abstract void cancel(InterpreterContext context);
-
-  /**
-   * Dynamic form handling
-   * see http://zeppelin.incubator.apache.org/docs/dynamicform.html
-   *
-   * @return FormType.SIMPLE enables simple pattern replacement (e.g. Hello ${name=world}),
-   *         FormType.NATIVE handles forms through the API
-   */
-  public abstract FormType getFormType();
-
-  /**
-   * Gets the progress of a running interpret() call, as a percentage.
-   *
-   * @param context
-   * @return a number between 0 and 100
-   */
-  public abstract int getProgress(InterpreterContext context);
-
-  /**
-   * Get completion list based on cursor position.
-   * Implementing this method enables auto-completion.
-   *
-   * @param buf statements
-   * @param cursor cursor position in statements
-   * @return list of possible completions. Return an empty list if there is nothing to return.
-   */
-  public abstract List<String> completion(String buf, int cursor);
-
-  /**
-   * An interpreter can implement its own scheduler by overriding this method.
-   * Two default schedulers are provided: FIFO and Parallel.
-   * If your interpret() can handle concurrent requests, use Parallel; otherwise use FIFO.
-   *
-   * You can get default scheduler by using
-   * SchedulerFactory.singleton().createOrGetFIFOScheduler()
-   * SchedulerFactory.singleton().createOrGetParallelScheduler()
-   *
-   *
-   * @return scheduler instance.
-   *         This method can be called multiple times and has to return the same instance.
-   *         Cannot return null.
-   */
-  public Scheduler getScheduler() {
-    return SchedulerFactory.singleton().createOrGetFIFOScheduler("interpreter_" + this.hashCode());
-  }
-
-  /**
-   * Called when interpreter is no longer used.
-   */
-  public void destroy() {
-    getScheduler().stop();
-  }
-
-
-
-
-
-  static Logger logger = LoggerFactory.getLogger(Interpreter.class);
-  private InterpreterGroup interpreterGroup;
-  private URL [] classloaderUrls;
-  protected Properties property;
-
-  public Interpreter(Properties property) {
-    this.property = property;
-  }
-
-  public void setProperty(Properties property) {
-    this.property = property;
-  }
-
-  public Properties getProperty() {
-    Properties p = new Properties();
-    p.putAll(property);
-
-    Map<String, InterpreterProperty> defaultProperties = Interpreter
-        .findRegisteredInterpreterByClassName(getClassName()).getProperties();
-    for (String k : defaultProperties.keySet()) {
-      if (!p.containsKey(k)) {
-        String value = defaultProperties.get(k).getDefaultValue();
-        if (value != null) {
-          p.put(k, defaultProperties.get(k).getDefaultValue());
-        }
-      }
-    }
-
-    return p;
-  }
-
-  public String getProperty(String key) {
-    if (property.containsKey(key)) {
-      return property.getProperty(key);
-    }
-
-    Map<String, InterpreterProperty> defaultProperties = Interpreter
-        .findRegisteredInterpreterByClassName(getClassName()).getProperties();
-    if (defaultProperties.containsKey(key)) {
-      return defaultProperties.get(key).getDefaultValue();
-    }
-
-    return null;
-  }
-
-
-  public String getClassName() {
-    return this.getClass().getName();
-  }
-
-  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
-    this.interpreterGroup = interpreterGroup;
-  }
-
-  public InterpreterGroup getInterpreterGroup() {
-    return this.interpreterGroup;
-  }
-
-  public URL[] getClassloaderUrls() {
-    return classloaderUrls;
-  }
-
-  public void setClassloaderUrls(URL[] classloaderUrls) {
-    this.classloaderUrls = classloaderUrls;
-  }
-
-
-  /**
-   * Type of form.
-   */
-  public static enum FormType {
-    NATIVE, SIMPLE, NONE
-  }
-
-  /**
-   * Represents a registered interpreter class.
-   */
-  public static class RegisteredInterpreter {
-    private String name;
-    private String group;
-    private String className;
-    private Map<String, InterpreterProperty> properties;
-    private String path;
-
-    public RegisteredInterpreter(String name, String group, String className,
-        Map<String, InterpreterProperty> properties) {
-      super();
-      this.name = name;
-      this.group = group;
-      this.className = className;
-      this.properties = properties;
-    }
-
-    public String getName() {
-      return name;
-    }
-
-    public String getGroup() {
-      return group;
-    }
-
-    public String getClassName() {
-      return className;
-    }
-
-    public Map<String, InterpreterProperty> getProperties() {
-      return properties;
-    }
-
-    public void setPath(String path) {
-      this.path = path;
-    }
-
-    public String getPath() {
-      return path;
-    }
-
-  }
-
-  /**
-   * Type of Scheduling.
-   */
-  public static enum SchedulingMode {
-    FIFO, PARALLEL
-  }
-
-  public static Map<String, RegisteredInterpreter> registeredInterpreters = Collections
-      .synchronizedMap(new HashMap<String, RegisteredInterpreter>());
-
-  public static void register(String name, String className) {
-    register(name, name, className);
-  }
-
-  public static void register(String name, String group, String className) {
-    register(name, group, className, new HashMap<String, InterpreterProperty>());
-  }
-
-  public static void register(String name, String group, String className,
-      Map<String, InterpreterProperty> properties) {
-    registeredInterpreters.put(name, new RegisteredInterpreter(name, group, className, properties));
-  }
-
-  public static RegisteredInterpreter findRegisteredInterpreterByClassName(String className) {
-    for (RegisteredInterpreter ri : registeredInterpreters.values()) {
-      if (ri.getClassName().equals(className)) {
-        return ri;
-      }
-    }
-    return null;
-  }
-
-
-}
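
To make the contract above concrete, a minimal hypothetical interpreter that just echoes its input (class name, interpreter name and package are invented; only methods declared by the abstract class above are used, and the usual java.util imports are assumed):

  public class EchoInterpreter extends Interpreter {
    static {
      // make the interpreter discoverable under the name "echo"
      Interpreter.register("echo", EchoInterpreter.class.getName());
    }

    public EchoInterpreter(Properties property) {
      super(property);
    }

    @Override public void open() {}
    @Override public void close() {}

    @Override
    public InterpreterResult interpret(String st, InterpreterContext context) {
      // return the statement unchanged, marked as a successful plain-text result
      return new InterpreterResult(InterpreterResult.Code.SUCCESS, st);
    }

    @Override public void cancel(InterpreterContext context) {}
    @Override public FormType getFormType() { return FormType.SIMPLE; }
    @Override public int getProgress(InterpreterContext context) { return 0; }

    @Override
    public List<String> completion(String buf, int cursor) {
      return Collections.<String>emptyList();
    }
  }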

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterContext.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterContext.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterContext.java
deleted file mode 100644
index d99e8b0..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterContext.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.util.Map;
-
-import com.nflabs.zeppelin.display.GUI;
-
-/**
- * Interpreter context
- */
-public class InterpreterContext {
-  private final String paragraphTitle;
-  private final String paragraphId;
-  private final String paragraphText;
-  private final Map<String, Object> config;
-  private GUI gui;
-
-
-  public InterpreterContext(String paragraphId,
-                            String paragraphTitle,
-                            String paragraphText,
-                            Map<String, Object> config,
-                            GUI gui
-                            ) {
-    this.paragraphId = paragraphId;
-    this.paragraphTitle = paragraphTitle;
-    this.paragraphText = paragraphText;
-    this.config = config;
-    this.gui = gui;
-  }
-
-  public String getParagraphId() {
-    return paragraphId;
-  }
-
-  public String getParagraphText() {
-    return paragraphText;
-  }
-
-  public String getParagraphTitle() {
-    return paragraphTitle;
-  }
-
-  public Map<String, Object> getConfig() {
-    return config;
-  }
-
-  public GUI getGui() {
-    return gui;
-  }
-
-}
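
For completeness, a sketch of constructing a context by hand, e.g. to drive an interpreter in a test (all argument values are invented; the argument order follows the constructor above):

  Map<String, Object> config = new HashMap<String, Object>();
  InterpreterContext context = new InterpreterContext(
      "paragraph_1",    // paragraphId
      "my title",       // paragraphTitle
      "select 1",       // paragraphText
      config,
      new GUI());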

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterException.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterException.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterException.java
deleted file mode 100644
index 8f50363..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterException.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-/**
- * Runtime Exception for interpreters.
- * 
- */
-public class InterpreterException extends RuntimeException {
-
-  public InterpreterException(Throwable e) {
-    super(e);
-  }
-
-  public InterpreterException(String m) {
-    super(m);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterGroup.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterGroup.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterGroup.java
deleted file mode 100644
index ad2b348..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterGroup.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.util.LinkedList;
-import java.util.Properties;
-import java.util.Random;
-
-/**
- * InterpreterGroup is a list of interpreters in the same group,
- * and the unit of interpreter instantiation, restart, bind and unbind.
- */
-public class InterpreterGroup extends LinkedList<Interpreter>{
-  String id;
-
-  private static String generateId() {
-    return "InterpreterGroup_" + System.currentTimeMillis() + "_"
-           + new Random().nextInt();
-  }
-
-  public String getId() {
-    synchronized (this) {
-      if (id == null) {
-        id = generateId();
-      }
-      return id;
-    }
-  }
-
-
-  public Properties getProperty() {
-    Properties p = new Properties();
-    for (Interpreter intp : this) {
-      p.putAll(intp.getProperty());
-    }
-    return p;
-  }
-
-  public void close() {
-    for (Interpreter intp : this) {
-      intp.close();
-    }
-  }
-
-  public void destroy() {
-    for (Interpreter intp : this) {
-      intp.destroy();
-    }
-  }
-}
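
Since the group is just a LinkedList with an id and lifecycle helpers, usage is plain collection code; a brief sketch with hypothetical interpreter instances:

  InterpreterGroup group = new InterpreterGroup();
  group.add(sparkInterpreter);
  group.add(sqlInterpreter);
  for (Interpreter intp : group) {
    intp.setInterpreterGroup(group);
  }

  Properties merged = group.getProperty();   // union of all members' properties
  group.close();                             // closes every member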

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterProperty.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterProperty.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterProperty.java
deleted file mode 100644
index 63017e0..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterProperty.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-/**
- * Represents a property of an interpreter.
- */
-public class InterpreterProperty {
-  String defaultValue;
-  String description;
-
-  public InterpreterProperty(String defaultValue,
-      String description) {
-    super();
-    this.defaultValue = defaultValue;
-    this.description = description;
-  }
-
-  public String getDefaultValue() {
-    return defaultValue;
-  }
-
-  public void setDefaultValue(String defaultValue) {
-    this.defaultValue = defaultValue;
-  }
-
-  public String getDescription() {
-    return description;
-  }
-
-  public void setDescription(String description) {
-    this.description = description;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterPropertyBuilder.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterPropertyBuilder.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterPropertyBuilder.java
deleted file mode 100644
index 34aa51a..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterPropertyBuilder.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * InterpreterPropertyBuilder 
- */
-public class InterpreterPropertyBuilder {
-  Map<String, InterpreterProperty> properties = new HashMap<String, InterpreterProperty>();
-  
-  public InterpreterPropertyBuilder add(String name, String defaultValue, String description){
-    properties.put(name, new InterpreterProperty(defaultValue, description));
-    return this;
-  }
-  
-  public Map<String, InterpreterProperty> build(){
-    return properties;
-  }
-}
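
A sketch of how the builder is commonly combined with Interpreter.register() to declare default properties (the interpreter name, class name and property keys below are made up):

  static {
    Interpreter.register(
        "jdbc",                              // interpreter name
        "jdbc",                              // group
        "com.example.JdbcInterpreter",       // hypothetical implementation class
        new InterpreterPropertyBuilder()
            .add("jdbc.url", "jdbc:h2:mem:test", "JDBC connection URL")
            .add("jdbc.user", "sa", "JDBC user name")
            .build());
  }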

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterResult.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterResult.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterResult.java
deleted file mode 100644
index 94bf673..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterResult.java
+++ /dev/null
@@ -1,120 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.io.Serializable;
-
-/**
- * Interpreter result template.
- *
- * @author Leemoonsoo
- *
- */
-public class InterpreterResult implements Serializable {
-
-  /**
-   *  Type of result after code execution.
-   *
-   * @author Leemoonsoo
-   *
-   */
-  public static enum Code {
-    SUCCESS,
-    INCOMPLETE,
-    ERROR
-  }
-
-  /**
-   * Type of Data.
-   *
-   * @author Leemoonsoo
-   *
-   */
-  public static enum Type {
-    TEXT,
-    HTML,
-    TABLE,
-    IMG,
-    SVG,
-    NULL
-  }
-
-  Code code;
-  Type type;
-  String msg;
-
-  public InterpreterResult(Code code) {
-    this.code = code;
-    this.msg = null;
-    this.type = Type.TEXT;
-  }
-
-  public InterpreterResult(Code code, String msg) {
-    this.code = code;
-    this.msg = getData(msg);
-    this.type = getType(msg);
-  }
-
-  public InterpreterResult(Code code, Type type, String msg) {
-    this.code = code;
-    this.msg = msg;
-    this.type = type;
-  }
-
-  /**
-   * Strips a leading magic prefix such as %html or %text from the message.
-   *
-   * @param msg
-   * @return
-   */
-  private String getData(String msg) {
-    if (msg == null) {
-      return null;
-    }
-
-    Type[] types = Type.values();
-    for (Type t : types) {
-      String magic = "%" + t.name().toLowerCase();
-      if (msg.startsWith(magic + " ") || msg.startsWith(magic + "\n")) {
-        int magicLength = magic.length() + 1;
-        if (msg.length() > magicLength) {
-          return msg.substring(magicLength);
-        } else {
-          return "";
-        }
-      }
-    }
-
-    return msg;
-  }
-
-
-  private Type getType(String msg) {
-    if (msg == null) {
-      return Type.TEXT;
-    }
-    Type[] types = Type.values();
-    for (Type t : types) {
-      String magic = "%" + t.name().toLowerCase();
-      if (msg.startsWith(magic + " ") || msg.startsWith(magic + "\n")) {
-        return t;
-      }
-    }
-    return Type.TEXT;
-  }
-
-  public Code code() {
-    return code;
-  }
-
-  public String message() {
-    return msg;
-  }
-
-  public Type type() {
-    return type;
-  }
-
-  public InterpreterResult type(Type type) {
-    this.type = type;
-    return this;
-  }
-}
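
A short sketch of the magic-prefix handling implemented above (the messages are invented):

  // plain text: type defaults to TEXT and the message is kept as-is
  InterpreterResult text = new InterpreterResult(InterpreterResult.Code.SUCCESS, "42 rows");

  // a leading "%html " switches the type and is stripped from the message:
  // html.type() == Type.HTML, html.message() == "<b>42 rows</b>"
  InterpreterResult html =
      new InterpreterResult(InterpreterResult.Code.SUCCESS, "%html <b>42 rows</b>");

  InterpreterResult error = new InterpreterResult(InterpreterResult.Code.ERROR, "something failed");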

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterUtils.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterUtils.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterUtils.java
deleted file mode 100644
index 37f9ff9..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterUtils.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.nflabs.zeppelin.interpreter;
-
-
-import java.lang.reflect.InvocationTargetException;
-
-/**
- * Interpreter utility functions
- */
-public class InterpreterUtils {
-
-  public static String getMostRelevantMessage(Exception ex) {
-    if (ex instanceof InvocationTargetException) {
-      Throwable cause = ((InvocationTargetException) ex).getCause();
-      if (cause != null) {
-        return cause.getMessage();
-      }
-    }
-    String message = ex.getMessage();
-    if (message == null || message.isEmpty()) {
-      return ex.toString();
-    }
-    return message;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/LazyOpenInterpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/LazyOpenInterpreter.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/LazyOpenInterpreter.java
deleted file mode 100644
index 753adc9..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/LazyOpenInterpreter.java
+++ /dev/null
@@ -1,131 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.net.URL;
-import java.util.List;
-import java.util.Properties;
-
-import com.nflabs.zeppelin.scheduler.Scheduler;
-
-/**
- * Interpreter wrapper for lazy initialization
- */
-public class LazyOpenInterpreter
-    extends Interpreter
-    implements WrappedInterpreter {
-  private Interpreter intp;
-  boolean opened = false;
-
-  public LazyOpenInterpreter(Interpreter intp) {
-    super(new Properties());
-    this.intp = intp;
-  }
-
-  @Override
-  public Interpreter getInnerInterpreter() {
-    return intp;
-  }
-
-  @Override
-  public void setProperty(Properties property) {
-    intp.setProperty(property);
-  }
-
-  @Override
-  public Properties getProperty() {
-    return intp.getProperty();
-  }
-
-  @Override
-  public String getProperty(String key) {
-    return intp.getProperty(key);
-  }
-
-  @Override
-  public void open() {
-    if (opened == true) {
-      return;
-    }
-
-    synchronized (intp) {
-      if (opened == false) {
-        intp.open();
-        opened = true;
-      }
-    }
-  }
-
-  @Override
-  public void close() {
-    synchronized (intp) {
-      if (opened == true) {
-        intp.close();
-        opened = false;
-      }
-    }
-  }
-
-  public boolean isOpen() {
-    synchronized (intp) {
-      return opened;
-    }
-  }
-
-  @Override
-  public InterpreterResult interpret(String st, InterpreterContext context) {
-    open();
-    return intp.interpret(st, context);
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {
-    open();
-    intp.cancel(context);
-  }
-
-  @Override
-  public FormType getFormType() {
-    return intp.getFormType();
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    open();
-    return intp.getProgress(context);
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    return intp.getScheduler();
-  }
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    open();
-    return intp.completion(buf, cursor);
-  }
-
-  @Override
-  public String getClassName() {
-    return intp.getClassName();
-  }
-
-  @Override
-  public InterpreterGroup getInterpreterGroup() {
-    return intp.getInterpreterGroup();
-  }
-
-  @Override
-  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
-    intp.setInterpreterGroup(interpreterGroup);
-  }
-
-  @Override
-  public URL [] getClassloaderUrls() {
-    return intp.getClassloaderUrls();
-  }
-
-  @Override
-  public void setClassloaderUrls(URL [] urls) {
-    intp.setClassloaderUrls(urls);
-  }
-}
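
A sketch of the intended wrapping (the inner interpreter is the hypothetical EchoInterpreter sketched after Interpreter.java above, and the context is assumed): open() on the inner interpreter is deferred until the first call that needs it.

  LazyOpenInterpreter lazy = new LazyOpenInterpreter(new EchoInterpreter(new Properties()));

  lazy.isOpen();                      // false - nothing has touched the inner interpreter yet
  lazy.interpret("hello", context);   // triggers inner.open() exactly once, then delegates
  lazy.isOpen();                      // true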

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/WrappedInterpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/WrappedInterpreter.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/WrappedInterpreter.java
deleted file mode 100644
index 47c71ff..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/WrappedInterpreter.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-/**
- * WrappedInterpreter
- */
-public interface WrappedInterpreter {
-  public Interpreter getInnerInterpreter();
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/ClientFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/ClientFactory.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/ClientFactory.java
deleted file mode 100644
index 670dc2e..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/ClientFactory.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.pool2.BasePooledObjectFactory;
-import org.apache.commons.pool2.PooledObject;
-import org.apache.commons.pool2.impl.DefaultPooledObject;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TSocket;
-import org.apache.thrift.transport.TTransportException;
-
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterService;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
-
-/**
- * Thrift client factory for pooled connections to a remote interpreter process.
- */
-public class ClientFactory extends BasePooledObjectFactory<Client>{
-  private String host;
-  private int port;
-  Map<Client, TSocket> clientSocketMap = new HashMap<Client, TSocket>();
-
-  public ClientFactory(String host, int port) {
-    this.host = host;
-    this.port = port;
-  }
-
-  @Override
-  public Client create() throws Exception {
-    TSocket transport = new TSocket(host, port);
-    try {
-      transport.open();
-    } catch (TTransportException e) {
-      throw new InterpreterException(e);
-    }
-
-    TProtocol protocol = new  TBinaryProtocol(transport);
-    Client client = new RemoteInterpreterService.Client(protocol);
-
-    synchronized (clientSocketMap) {
-      clientSocketMap.put(client, transport);
-    }
-    return client;
-  }
-
-  @Override
-  public PooledObject<Client> wrap(Client client) {
-    return new DefaultPooledObject<Client>(client);
-  }
-
-  @Override
-  public void destroyObject(PooledObject<Client> p) {
-    synchronized (clientSocketMap) {
-      if (clientSocketMap.containsKey(p.getObject())) {
-        clientSocketMap.get(p.getObject()).close();
-        clientSocketMap.remove(p.getObject());
-      }
-    }
-  }
-}
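
The factory above is designed to be plugged into a commons-pool2 object pool; a sketch of that wiring (host and port are placeholders, and the usual commons-pool2 imports are assumed):

  GenericObjectPool<Client> clientPool =
      new GenericObjectPool<Client>(new ClientFactory("localhost", 30000));

  Client client = clientPool.borrowObject();
  try {
    // issue Thrift calls against the remote interpreter process here
  } finally {
    clientPool.returnObject(client);
  }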

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreter.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreter.java
deleted file mode 100644
index ccae0f7..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreter.java
+++ /dev/null
@@ -1,330 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.thrift.TException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Type;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterContext;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterResult;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-/**
- * Proxy for an interpreter running in a separate process, communicating over Thrift.
- */
-public class RemoteInterpreter extends Interpreter {
-  Logger logger = LoggerFactory.getLogger(RemoteInterpreter.class);
-  Gson gson = new Gson();
-  private String interpreterRunner;
-  private String interpreterPath;
-  private String className;
-  FormType formType;
-  boolean initialized;
-  private Map<String, String> env;
-  static Map<String, RemoteInterpreterProcess> interpreterGroupReference
-    = new HashMap<String, RemoteInterpreterProcess>();
-
-  public RemoteInterpreter(Properties property,
-      String className,
-      String interpreterRunner,
-      String interpreterPath) {
-    super(property);
-
-    this.className = className;
-    initialized = false;
-    this.interpreterRunner = interpreterRunner;
-    this.interpreterPath = interpreterPath;
-    env = new HashMap<String, String>();
-  }
-
-  public RemoteInterpreter(Properties property,
-      String className,
-      String interpreterRunner,
-      String interpreterPath,
-      Map<String, String> env) {
-    super(property);
-
-    this.className = className;
-    this.interpreterRunner = interpreterRunner;
-    this.interpreterPath = interpreterPath;
-    this.env = env;
-  }
-
-  @Override
-  public String getClassName() {
-    return className;
-  }
-
-  public RemoteInterpreterProcess getInterpreterProcess() {
-    synchronized (interpreterGroupReference) {
-      if (interpreterGroupReference.containsKey(getInterpreterGroupKey(getInterpreterGroup()))) {
-        RemoteInterpreterProcess interpreterProcess = interpreterGroupReference
-            .get(getInterpreterGroupKey(getInterpreterGroup()));
-        try {
-          return interpreterProcess;
-        } catch (Exception e) {
-          throw new InterpreterException(e);
-        }
-      } else {
-        throw new InterpreterException("Unexpected error");
-      }
-    }
-  }
-
-  private synchronized void init() {
-    if (initialized == true) {
-      return;
-    }
-
-    RemoteInterpreterProcess interpreterProcess = null;
-
-    synchronized (interpreterGroupReference) {
-      if (interpreterGroupReference.containsKey(getInterpreterGroupKey(getInterpreterGroup()))) {
-        interpreterProcess = interpreterGroupReference
-            .get(getInterpreterGroupKey(getInterpreterGroup()));
-      } else {
-        throw new InterpreterException("Unexpected error");
-      }
-    }
-
-    int rc = interpreterProcess.reference();
-
-    synchronized (interpreterProcess) {
-      // when first process created
-      if (rc == 1) {
-        // create all interpreter class in this interpreter group
-        Client client = null;
-        try {
-          client = interpreterProcess.getClient();
-        } catch (Exception e1) {
-          throw new InterpreterException(e1);
-        }
-
-        try {
-          for (Interpreter intp : this.getInterpreterGroup()) {
-            logger.info("Create remote interpreter {}", intp.getClassName());
-            client.createInterpreter(intp.getClassName(), (Map) property);
-
-          }
-        } catch (TException e) {
-          throw new InterpreterException(e);
-        } finally {
-          interpreterProcess.releaseClient(client);
-        }
-      }
-    }
-    initialized = true;
-  }
-
-
-
-  @Override
-  public void open() {
-    init();
-  }
-
-  @Override
-  public void close() {
-    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
-    Client client = null;
-    try {
-      client = interpreterProcess.getClient();
-    } catch (Exception e1) {
-      throw new InterpreterException(e1);
-    }
-
-    try {
-      client.close(className);
-    } catch (TException e) {
-      throw new InterpreterException(e);
-    } finally {
-      interpreterProcess.releaseClient(client);
-    }
-
-    interpreterProcess.dereference();
-  }
-
-  @Override
-  public InterpreterResult interpret(String st, InterpreterContext context) {
-    FormType form = getFormType();
-    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
-    Client client = null;
-    try {
-      client = interpreterProcess.getClient();
-    } catch (Exception e1) {
-      throw new InterpreterException(e1);
-    }
-
-    try {
-      GUI settings = context.getGui();
-      RemoteInterpreterResult remoteResult = client.interpret(className, st, convert(context));
-
-      Map<String, Object> remoteConfig = (Map<String, Object>) gson.fromJson(
-          remoteResult.getConfig(), new TypeToken<Map<String, Object>>() {
-          }.getType());
-      context.getConfig().clear();
-      context.getConfig().putAll(remoteConfig);
-
-      if (form == FormType.NATIVE) {
-        GUI remoteGui = gson.fromJson(remoteResult.getGui(), GUI.class);
-        context.getGui().clear();
-        context.getGui().setParams(remoteGui.getParams());
-        context.getGui().setForms(remoteGui.getForms());
-      }
-
-      return convert(remoteResult);
-    } catch (TException e) {
-      throw new InterpreterException(e);
-    } finally {
-      interpreterProcess.releaseClient(client);
-    }
-  }
-
-  @Override
-  public void cancel(InterpreterContext context) {
-    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
-    Client client = null;
-    try {
-      client = interpreterProcess.getClient();
-    } catch (Exception e1) {
-      throw new InterpreterException(e1);
-    }
-
-    try {
-      client.cancel(className, convert(context));
-    } catch (TException e) {
-      throw new InterpreterException(e);
-    } finally {
-      interpreterProcess.releaseClient(client);
-    }
-  }
-
-
-  @Override
-  public FormType getFormType() {
-    init();
-
-    if (formType != null) {
-      return formType;
-    }
-
-    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
-    Client client = null;
-    try {
-      client = interpreterProcess.getClient();
-    } catch (Exception e1) {
-      throw new InterpreterException(e1);
-    }
-
-    try {
-      formType = FormType.valueOf(client.getFormType(className));
-      return formType;
-    } catch (TException e) {
-      throw new InterpreterException(e);
-    } finally {
-      interpreterProcess.releaseClient(client);
-    }
-  }
-
-  @Override
-  public int getProgress(InterpreterContext context) {
-    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
-    Client client = null;
-    try {
-      client = interpreterProcess.getClient();
-    } catch (Exception e1) {
-      throw new InterpreterException(e1);
-    }
-
-    try {
-      return client.getProgress(className, convert(context));
-    } catch (TException e) {
-      throw new InterpreterException(e);
-    } finally {
-      interpreterProcess.releaseClient(client);
-    }
-  }
-
-
-  @Override
-  public List<String> completion(String buf, int cursor) {
-    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
-    Client client = null;
-    try {
-      client = interpreterProcess.getClient();
-    } catch (Exception e1) {
-      throw new InterpreterException(e1);
-    }
-
-    try {
-      return client.completion(className, buf, cursor);
-    } catch (TException e) {
-      throw new InterpreterException(e);
-    } finally {
-      interpreterProcess.releaseClient(client);
-    }
-  }
-
-  @Override
-  public Scheduler getScheduler() {
-    int maxConcurrency = 10;
-    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
-    return SchedulerFactory.singleton().createOrGetRemoteScheduler(
-        "remoteinterpreter_" + interpreterProcess.hashCode(),
-        getInterpreterProcess(),
-        maxConcurrency);
-  }
-
-  @Override
-  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
-    super.setInterpreterGroup(interpreterGroup);
-
-    synchronized (interpreterGroupReference) {
-      if (!interpreterGroupReference
-          .containsKey(getInterpreterGroupKey(interpreterGroup))) {
-        interpreterGroupReference.put(getInterpreterGroupKey(interpreterGroup),
-            new RemoteInterpreterProcess(interpreterRunner,
-                interpreterPath, env));
-
-        logger.info("setInterpreterGroup = "
-            + getInterpreterGroupKey(interpreterGroup) + " class=" + className
-            + ", path=" + interpreterPath);
-      }
-    }
-  }
-
-  private String getInterpreterGroupKey(InterpreterGroup interpreterGroup) {
-    return interpreterGroup.getId();
-  }
-
-  private RemoteInterpreterContext convert(InterpreterContext ic) {
-    return new RemoteInterpreterContext(
-        ic.getParagraphId(),
-        ic.getParagraphTitle(),
-        ic.getParagraphText(),
-        gson.toJson(ic.getConfig()),
-        gson.toJson(ic.getGui()));
-  }
-
-  private InterpreterResult convert(RemoteInterpreterResult result) {
-    return new InterpreterResult(
-        InterpreterResult.Code.valueOf(result.getCode()),
-        Type.valueOf(result.getType()),
-        result.getMsg());
-  }
-}
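
A side note on the RemoteInterpreter removed above: every RPC method (open, close,
interpret, cancel, getFormType, getProgress, completion) repeats the same shape:
borrow a client from the process's pool, make one Thrift call, release the client in
a finally block. Below is a minimal, self-contained sketch of that idiom only;
ClientPool, RemoteCall and RemoteCaller are illustrative names, not Zeppelin classes.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Illustrative stand-in for the pooled Thrift clients used by the class above.
class ClientPool<C> {
  private final BlockingQueue<C> idle = new LinkedBlockingQueue<C>();

  ClientPool(Iterable<C> clients) {
    for (C c : clients) {
      idle.add(c);
    }
  }

  C borrow() throws InterruptedException {
    return idle.take();           // blocks until a client is free
  }

  void release(C client) {
    idle.add(client);             // hand the client back for the next caller
  }
}

// One remote call against a borrowed client.
interface RemoteCall<C, R> {
  R apply(C client) throws Exception;
}

class RemoteCaller<C> {
  private final ClientPool<C> pool;

  RemoteCaller(ClientPool<C> pool) {
    this.pool = pool;
  }

  // Borrow a client, run exactly one call, release the client in finally.
  <R> R call(RemoteCall<C, R> body) {
    C client;
    try {
      client = pool.borrow();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
    }
    try {
      return body.apply(client);
    } catch (Exception e) {
      throw new RuntimeException(e);  // the deleted class wraps failures in InterpreterException
    } finally {
      pool.release(client);           // released even when the call throws
    }
  }
}

With Java 8 lambdas a call site then reads roughly
caller.call(c -> c.interpret(className, st, convertedContext)).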

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterProcess.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterProcess.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterProcess.java
deleted file mode 100644
index 3829618..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterProcess.java
+++ /dev/null
@@ -1,192 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.exec.DefaultExecutor;
-import org.apache.commons.exec.ExecuteException;
-import org.apache.commons.exec.ExecuteResultHandler;
-import org.apache.commons.exec.ExecuteWatchdog;
-import org.apache.commons.exec.environment.EnvironmentUtils;
-import org.apache.commons.pool2.impl.GenericObjectPool;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
-
-/**
- *
- */
-public class RemoteInterpreterProcess implements ExecuteResultHandler {
-  Logger logger = LoggerFactory.getLogger(RemoteInterpreterProcess.class);
-  AtomicInteger referenceCount;
-  private DefaultExecutor executor;
-  private ExecuteWatchdog watchdog;
-  boolean running = false;
-  int port = -1;
-  private String interpreterRunner;
-  private String interpreterDir;
-
-  private GenericObjectPool<Client> clientPool;
-  private Map<String, String> env;
-
-  public RemoteInterpreterProcess(String intpRunner, String intpDir, Map<String, String> env) {
-    this.interpreterRunner = intpRunner;
-    this.interpreterDir = intpDir;
-    this.env = env;
-    referenceCount = new AtomicInteger(0);
-  }
-
-  public int getPort() {
-    return port;
-  }
-
-  public int reference() {
-    synchronized (referenceCount) {
-      if (executor == null) {
-        // start server process
-        try {
-          port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
-        } catch (IOException e1) {
-          throw new InterpreterException(e1);
-        }
-
-
-        CommandLine cmdLine = CommandLine.parse(interpreterRunner);
-        cmdLine.addArgument("-d", false);
-        cmdLine.addArgument(interpreterDir, false);
-        cmdLine.addArgument("-p", false);
-        cmdLine.addArgument(Integer.toString(port), false);
-
-        executor = new DefaultExecutor();
-
-        watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
-        executor.setWatchdog(watchdog);
-
-        running = true;
-        try {
-          Map procEnv = EnvironmentUtils.getProcEnvironment();
-          procEnv.putAll(env);
-
-          logger.info("Run interpreter process {}", cmdLine);
-          executor.execute(cmdLine, procEnv, this);
-        } catch (IOException e) {
-          running = false;
-          throw new InterpreterException(e);
-        }
-
-
-        long startTime = System.currentTimeMillis();
-        while (System.currentTimeMillis() - startTime < 5 * 1000) {
-          if (RemoteInterpreterUtils.checkIfRemoteEndpointAccessible("localhost", port)) {
-            break;
-          } else {
-            try {
-              Thread.sleep(500);
-            } catch (InterruptedException e) {
-            }
-          }
-        }
-
-        clientPool = new GenericObjectPool<Client>(new ClientFactory("localhost", port));
-      }
-      return referenceCount.incrementAndGet();
-    }
-  }
-
-  public Client getClient() throws Exception {
-    return clientPool.borrowObject();
-  }
-
-  public void releaseClient(Client client) {
-    clientPool.returnObject(client);
-  }
-
-  public int dereference() {
-    synchronized (referenceCount) {
-      int r = referenceCount.decrementAndGet();
-      if (r == 0) {
-        logger.info("shutdown interpreter process");
-        // first try shutdown
-        try {
-          Client client = getClient();
-          client.shutdown();
-          releaseClient(client);
-        } catch (Exception e) {
-          logger.error("Error", e);
-          watchdog.destroyProcess();
-        }
-
-        clientPool.clear();
-        clientPool.close();
-
-        // wait for 3 sec and force kill
-        // remote process server.serve() loop is not always finishing gracefully
-        long startTime = System.currentTimeMillis();
-        while (System.currentTimeMillis() - startTime < 3 * 1000) {
-          if (this.isRunning()) {
-            try {
-              Thread.sleep(500);
-            } catch (InterruptedException e) {
-            }
-          } else {
-            break;
-          }
-        }
-
-        if (isRunning()) {
-          logger.info("kill interpreter process");
-          watchdog.destroyProcess();
-        }
-
-        executor = null;
-        watchdog = null;
-        running = false;
-        logger.info("Remote process terminated");
-      }
-      return r;
-    }
-  }
-
-  public int referenceCount() {
-    synchronized (referenceCount) {
-      return referenceCount.get();
-    }
-  }
-
-  @Override
-  public void onProcessComplete(int exitValue) {
-    logger.info("Interpreter process exited {}", exitValue);
-    running = false;
-
-  }
-
-  @Override
-  public void onProcessFailed(ExecuteException e) {
-    logger.info("Interpreter process failed {}", e);
-    running = false;
-  }
-
-  public boolean isRunning() {
-    return running;
-  }
-
-  public int getNumActiveClient() {
-    if (clientPool == null) {
-      return 0;
-    } else {
-      return clientPool.getNumActive();
-    }
-  }
-
-  public int getNumIdleClient() {
-    if (clientPool == null) {
-      return 0;
-    } else {
-      return clientPool.getNumIdle();
-    }
-  }
-}
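
The RemoteInterpreterProcess removed above is, at its core, a reference-counted
process handle: the first reference() launches the interpreter process and waits for
its port to become reachable, and the last dereference() shuts it down. A
stripped-down sketch of that counting idiom, with the start and stop actions injected
so the example stays standalone (RefCountedProcess is an illustrative name, not a
Zeppelin class):

import java.util.concurrent.atomic.AtomicInteger;

// Illustrative sketch of the reference-counted life cycle shown above.
class RefCountedProcess {
  private final AtomicInteger count = new AtomicInteger(0);
  private final Runnable start;   // e.g. run bin/interpreter.sh and poll the port
  private final Runnable stop;    // e.g. Thrift shutdown(), then destroy the watchdog process

  RefCountedProcess(Runnable start, Runnable stop) {
    this.start = start;
    this.stop = stop;
  }

  synchronized int reference() {
    if (count.get() == 0) {
      start.run();                // only the first caller pays the startup cost
    }
    return count.incrementAndGet();
  }

  synchronized int dereference() {
    int r = count.decrementAndGet();
    if (r == 0) {
      stop.run();                 // the last caller tears the process down
    }
    return r;
  }
}

In the deleted class the startup step also polls checkIfRemoteEndpointAccessible for
up to five seconds before the client pool is created.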

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterServer.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterServer.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterServer.java
deleted file mode 100644
index 266d6fc..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterServer.java
+++ /dev/null
@@ -1,325 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote;
-
-
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.net.URL;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.thrift.TException;
-import org.apache.thrift.server.TThreadPoolServer;
-import org.apache.thrift.transport.TServerSocket;
-import org.apache.thrift.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.ClassloaderInterpreter;
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.Interpreter.FormType;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.LazyOpenInterpreter;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterContext;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterResult;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterService;
-import com.nflabs.zeppelin.scheduler.Job;
-import com.nflabs.zeppelin.scheduler.Job.Status;
-import com.nflabs.zeppelin.scheduler.JobListener;
-import com.nflabs.zeppelin.scheduler.JobProgressPoller;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-
-
-/**
- *
- */
-public class RemoteInterpreterServer
-  extends Thread
-  implements RemoteInterpreterService.Iface {
-  Logger logger = LoggerFactory.getLogger(RemoteInterpreterServer.class);
-
-  InterpreterGroup interpreterGroup = new InterpreterGroup();
-  Gson gson = new Gson();
-
-  RemoteInterpreterService.Processor<RemoteInterpreterServer> processor;
-  RemoteInterpreterServer handler;
-  private int port;
-  private TThreadPoolServer server;
-
-  public RemoteInterpreterServer(int port) throws TTransportException {
-    this.port = port;
-    processor = new RemoteInterpreterService.Processor<RemoteInterpreterServer>(this);
-    TServerSocket serverTransport = new TServerSocket(port);
-    server = new TThreadPoolServer(
-        new TThreadPoolServer.Args(serverTransport).processor(processor));
-  }
-
-  @Override
-  public void run() {
-    logger.info("Starting remote interpreter server on port {}", port);
-    server.serve();
-  }
-
-  @Override
-  public void shutdown() throws TException {
-    // server.stop() does not always end the server.serve() loop;
-    // server.serve() sometimes keeps hanging even after server.stop() is called.
-    // In that case the process has to be force-killed.
-    server.stop();
-  }
-
-  public int getPort() {
-    return port;
-  }
-
-  public boolean isRunning() {
-    if (server == null) {
-      return false;
-    } else {
-      return server.isServing();
-    }
-  }
-
-
-  public static void main(String[] args)
-      throws TTransportException, InterruptedException {
-    int port = Integer.parseInt(args[0]);
-    RemoteInterpreterServer remoteInterpreterServer = new RemoteInterpreterServer(port);
-    remoteInterpreterServer.start();
-    remoteInterpreterServer.join();
-    System.exit(0);
-  }
-
-
-  @Override
-  public void createInterpreter(String className, Map<String, String> properties)
-      throws TException {
-    try {
-      Class<Interpreter> replClass = (Class<Interpreter>) Object.class.forName(className);
-      Properties p = new Properties();
-      p.putAll(properties);
-
-      Constructor<Interpreter> constructor =
-          replClass.getConstructor(new Class[] {Properties.class});
-      Interpreter repl = constructor.newInstance(p);
-
-      ClassLoader cl = ClassLoader.getSystemClassLoader();
-      repl.setClassloaderUrls(new URL[]{});
-
-      synchronized (interpreterGroup) {
-        interpreterGroup.add(new LazyOpenInterpreter(
-            new ClassloaderInterpreter(repl, cl)));
-      }
-
-      logger.info("Instantiate interpreter {}", className);
-      repl.setInterpreterGroup(interpreterGroup);
-    } catch (ClassNotFoundException | NoSuchMethodException | SecurityException
-        | InstantiationException | IllegalAccessException
-        | IllegalArgumentException | InvocationTargetException e) {
-      e.printStackTrace();
-      throw new TException(e);
-    }
-  }
-
-  private Interpreter getInterpreter(String className) throws TException {
-    synchronized (interpreterGroup) {
-      for (Interpreter inp : interpreterGroup) {
-        if (inp.getClassName().equals(className)) {
-          return inp;
-        }
-      }
-    }
-    throw new TException(new InterpreterException("Interpreter instance "
-        + className + " not found"));
-  }
-
-  @Override
-  public void open(String className) throws TException {
-    Interpreter intp = getInterpreter(className);
-    intp.open();
-  }
-
-  @Override
-  public void close(String className) throws TException {
-    Interpreter intp = getInterpreter(className);
-    intp.close();
-  }
-
-
-  @Override
-  public RemoteInterpreterResult interpret(String className, String st,
-      RemoteInterpreterContext interpreterContext) throws TException {
-    Interpreter intp = getInterpreter(className);
-    InterpreterContext context = convert(interpreterContext);
-
-    Scheduler scheduler = intp.getScheduler();
-    InterpretJobListener jobListener = new InterpretJobListener();
-    InterpretJob job = new InterpretJob(
-        interpreterContext.getParagraphId(),
-        "remoteInterpretJob_" + System.currentTimeMillis(),
-        jobListener,
-        JobProgressPoller.DEFAULT_INTERVAL_MSEC,
-        intp,
-        st,
-        context);
-
-    scheduler.submit(job);
-
-    while (!job.isTerminated()) {
-      synchronized (jobListener) {
-        try {
-          jobListener.wait(1000);
-        } catch (InterruptedException e) {
-        }
-      }
-    }
-
-    if (job.getStatus() == Status.ERROR) {
-      throw new TException(job.getException());
-    } else {
-      if (intp.getFormType() == FormType.NATIVE) {
-        // serialize dynamic form
-
-      }
-
-      return convert((InterpreterResult) job.getReturn(),
-          context.getConfig(),
-          context.getGui());
-    }
-  }
-
-  class InterpretJobListener implements JobListener {
-
-    @Override
-    public void onProgressUpdate(Job job, int progress) {
-    }
-
-    @Override
-    public void beforeStatusChange(Job job, Status before, Status after) {
-    }
-
-    @Override
-    public void afterStatusChange(Job job, Status before, Status after) {
-      synchronized (this) {
-        notifyAll();
-      }
-    }
-  }
-
-  class InterpretJob extends Job {
-
-    private Interpreter interpreter;
-    private String script;
-    private InterpreterContext context;
-
-    public InterpretJob(
-        String jobId,
-        String jobName,
-        JobListener listener,
-        long progressUpdateIntervalMsec,
-        Interpreter interpreter,
-        String script,
-        InterpreterContext context) {
-      super(jobId, jobName, listener, progressUpdateIntervalMsec);
-      this.interpreter = interpreter;
-      this.script = script;
-      this.context = context;
-    }
-
-    @Override
-    public int progress() {
-      return 0;
-    }
-
-    @Override
-    public Map<String, Object> info() {
-      return null;
-    }
-
-    @Override
-    protected Object jobRun() throws Throwable {
-      InterpreterResult result = interpreter.interpret(script, context);
-      return result;
-    }
-
-    @Override
-    protected boolean jobAbort() {
-      return false;
-    }
-  }
-
-
-  @Override
-  public void cancel(String className, RemoteInterpreterContext interpreterContext)
-      throws TException {
-    Interpreter intp = getInterpreter(className);
-    intp.cancel(convert(interpreterContext));
-  }
-
-  @Override
-  public int getProgress(String className, RemoteInterpreterContext interpreterContext)
-      throws TException {
-    Interpreter intp = getInterpreter(className);
-    return intp.getProgress(convert(interpreterContext));
-  }
-
-
-  @Override
-  public String getFormType(String className) throws TException {
-    Interpreter intp = getInterpreter(className);
-    return intp.getFormType().toString();
-  }
-
-  @Override
-  public List<String> completion(String className, String buf, int cursor) throws TException {
-    Interpreter intp = getInterpreter(className);
-    return intp.completion(buf, cursor);
-  }
-
-  private InterpreterContext convert(RemoteInterpreterContext ric) {
-    return new InterpreterContext(
-        ric.getParagraphId(),
-        ric.getParagraphTitle(),
-        ric.getParagraphText(),
-        (Map<String, Object>) gson.fromJson(ric.getConfig(),
-            new TypeToken<Map<String, Object>>() {}.getType()),
-        gson.fromJson(ric.getGui(), GUI.class));
-  }
-
-  private RemoteInterpreterResult convert(InterpreterResult result,
-      Map<String, Object> config, GUI gui) {
-    return new RemoteInterpreterResult(
-        result.code().name(),
-        result.type().name(),
-        result.message(),
-        gson.toJson(config),
-        gson.toJson(gui));
-  }
-
-  @Override
-  public String getStatus(String jobId)
-      throws TException {
-    synchronized (interpreterGroup) {
-      for (Interpreter intp : interpreterGroup) {
-        for (Job job : intp.getScheduler().getJobsRunning()) {
-          if (jobId.equals(job.getId())) {
-            return job.getStatus().name();
-          }
-        }
-
-        for (Job job : intp.getScheduler().getJobsWaiting()) {
-          if (jobId.equals(job.getId())) {
-            return job.getStatus().name();
-          }
-        }
-      }
-    }
-    return "Unknown";
-  }
-}
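
In the RemoteInterpreterServer removed above, createInterpreter() resolves the
interpreter class by name and invokes its single Properties constructor through
reflection. The core of that step as a standalone sketch (ReflectiveFactory is an
illustrative name; only the reflective construction is shown, not the interpreter
group bookkeeping):

import java.lang.reflect.Constructor;
import java.util.Properties;

// Illustrative sketch of the reflective construction done in createInterpreter().
class ReflectiveFactory {
  // Loads className and calls its single-argument Properties constructor.
  static Object instantiate(String className, Properties p) throws Exception {
    Class<?> clazz = Class.forName(className);
    Constructor<?> ctor = clazz.getConstructor(Properties.class);
    return ctor.newInstance(p);
  }
}

The deleted server then wraps the new instance in a ClassloaderInterpreter and a
LazyOpenInterpreter before adding it to the interpreter group.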

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterUtils.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterUtils.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterUtils.java
deleted file mode 100644
index 0c8a505..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/remote/RemoteInterpreterUtils.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package com.nflabs.zeppelin.interpreter.remote;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.ServerSocket;
-import java.net.Socket;
-
-/**
- *
- */
-public class RemoteInterpreterUtils {
-  public static int findRandomAvailablePortOnAllLocalInterfaces() throws IOException {
-    int port;
-    try (ServerSocket socket = new ServerSocket(0);) {
-      port = socket.getLocalPort();
-      socket.close();
-    }
-    return port;
-  }
-
-  public static boolean checkIfRemoteEndpointAccessible(String host, int port) {
-    try {
-      Socket discover = new Socket();
-      discover.setSoTimeout(1000);
-      discover.connect(new InetSocketAddress(host, port), 1000);
-      discover.close();
-      return true;
-    } catch (IOException e) {
-      return false;
-    }
-  }
-}
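
The two helpers in the RemoteInterpreterUtils file removed above boil down to: ask
the OS for a free port by binding port 0, and probe host:port with a short connect
timeout. The same logic as a standalone sketch using try-with-resources (PortProbe
is an illustrative name, not a Zeppelin class):

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;

// Illustrative sketch of the port helpers shown above.
class PortProbe {
  // Binding port 0 lets the OS pick a free port; try-with-resources closes the socket.
  static int freePort() throws IOException {
    try (ServerSocket socket = new ServerSocket(0)) {
      return socket.getLocalPort();
    }
  }

  // True if something accepts a connection on host:port within timeoutMs.
  static boolean reachable(String host, int port, int timeoutMs) {
    try (Socket probe = new Socket()) {
      probe.connect(new InetSocketAddress(host, port), timeoutMs);
      return true;
    } catch (IOException e) {
      return false;
    }
  }
}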


[03/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/app/views/popover-html-unsafe-popup.html
----------------------------------------------------------------------
diff --git a/zeppelin-web/app/views/popover-html-unsafe-popup.html b/zeppelin-web/app/views/popover-html-unsafe-popup.html
index 5b7002d..e36b041 100644
--- a/zeppelin-web/app/views/popover-html-unsafe-popup.html
+++ b/zeppelin-web/app/views/popover-html-unsafe-popup.html
@@ -1,3 +1,17 @@
+<!--
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
 <div class="popover {{placement}}" ng-class="{ in: isOpen(), fade: animation() }">
   <div class="arrow"></div>
 
@@ -5,4 +19,4 @@
       <h3 class="popover-title" ng-bind="title" ng-show="title"></h3>
       <div class="popover-content" bind-html-unsafe="content"></div>
   </div>
-</div>
\ No newline at end of file
+</div>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/bower
----------------------------------------------------------------------
diff --git a/zeppelin-web/bower b/zeppelin-web/bower
index 80814b4..ed00320 100755
--- a/zeppelin-web/bower
+++ b/zeppelin-web/bower
@@ -1 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 "node/node" "./node_modules/bower/bin/bower" "$@"

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/grunt
----------------------------------------------------------------------
diff --git a/zeppelin-web/grunt b/zeppelin-web/grunt
index 8ac5c06..759cae7 100755
--- a/zeppelin-web/grunt
+++ b/zeppelin-web/grunt
@@ -1,2 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 "node/node" "./node_modules/.bin/grunt" "$@"
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-web/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-web/pom.xml b/zeppelin-web/pom.xml
index a3f59f2..0cb0d5b 100644
--- a/zeppelin-web/pom.xml
+++ b/zeppelin-web/pom.xml
@@ -1,15 +1,31 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-<!-- 
-  This is commented because somehow this force zeppelin-webb to be build 2 times.
+
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
--->
-  <groupId>com.nflabs.zeppelin</groupId>
+
+  <groupId>org.apache.zeppelin</groupId>
   <artifactId>zeppelin-web</artifactId>
   <packaging>war</packaging>
   <version>0.5.0-SNAPSHOT</version>
@@ -18,6 +34,42 @@
   <build>
     <plugins>
       <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <version>0.11</version>
+        <configuration>
+          <excludes>
+            <exclude>**/.idea/</exclude>
+            <exclude>**/*.iml</exclude>
+            <exclude>.git/</exclude>
+            <exclude>.gitignore</exclude>
+            <exclude>.bowerrc</exclude>
+            <exclude>.editorconfig</exclude>
+            <exclude>.jshintrc</exclude>
+            <exclude>.tmp/**</exclude>
+            <exclude>**/.settings/*</exclude>
+            <exclude>**/.classpath</exclude>
+            <exclude>**/.project</exclude>
+            <exclude>**/target/**</exclude>            
+            <exclude>node/**</exclude>
+            <exclude>node_modules/**</exclude>
+            <exclude>bower_components/**</exclude>
+            <exclude>test/**</exclude>
+            <exclude>src/main/webapp/**</exclude>
+            <exclude>app/.buildignore</exclude>
+            <exclude>app/fonts/fontawesome*</exclude>
+            <exclude>app/fonts/font-awesome*</exclude>
+            <exclude>app/styles/font-awesome*</exclude>
+            <exclude>app/fonts/Simple-Line*</exclude>
+            <exclude>app/fonts/simple-line*</exclude>
+            <exclude>app/styles/simple-line*</exclude>
+            <exclude>bower.json</exclude>
+            <exclude>package.json</exclude>
+            <exclude>README.md</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <plugin>
         <groupId>com.github.eirslett</groupId>
         <artifactId>frontend-maven-plugin</artifactId>
         <version>0.0.23</version>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/pom.xml b/zeppelin-zengine/pom.xml
index 019c4be..b90847b 100644
--- a/zeppelin-zengine/pom.xml
+++ b/zeppelin-zengine/pom.xml
@@ -1,15 +1,32 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
 
-  <groupId>com.nflabs.zeppelin</groupId>
+  <groupId>org.apache.zeppelin</groupId>
   <artifactId>zeppelin-zengine</artifactId>
   <packaging>jar</packaging>
   <version>0.5.0-SNAPSHOT</version>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/conf/ZeppelinConfiguration.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/conf/ZeppelinConfiguration.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/conf/ZeppelinConfiguration.java
deleted file mode 100644
index 476a464..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/conf/ZeppelinConfiguration.java
+++ /dev/null
@@ -1,514 +0,0 @@
-package com.nflabs.zeppelin.conf;
-
-import java.net.URL;
-import java.util.List;
-
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.XMLConfiguration;
-import org.apache.commons.configuration.tree.ConfigurationNode;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * Zeppelin configuration.
- *
- * @author Leemoonsoo
- *
- */
-public class ZeppelinConfiguration extends XMLConfiguration {
-  private static final String ZEPPELIN_SITE_XML = "zeppelin-site.xml";
-  private static final long serialVersionUID = 4749305895693848035L;
-  private static final Logger LOG = LoggerFactory.getLogger(ZeppelinConfiguration.class);
-  private static ZeppelinConfiguration conf;
-
-  public ZeppelinConfiguration(URL url) throws ConfigurationException {
-    setDelimiterParsingDisabled(true);
-    load(url);
-  }
-
-  public ZeppelinConfiguration() {
-    ConfVars[] vars = ConfVars.values();
-    for (ConfVars v : vars) {
-      if (v.getType() == ConfVars.VarType.BOOLEAN) {
-        this.setProperty(v.getVarName(), v.getBooleanValue());
-      } else if (v.getType() == ConfVars.VarType.LONG) {
-        this.setProperty(v.getVarName(), v.getLongValue());
-      } else if (v.getType() == ConfVars.VarType.INT) {
-        this.setProperty(v.getVarName(), v.getIntValue());
-      } else if (v.getType() == ConfVars.VarType.FLOAT) {
-        this.setProperty(v.getVarName(), v.getFloatValue());
-      } else if (v.getType() == ConfVars.VarType.STRING) {
-        this.setProperty(v.getVarName(), v.getStringValue());
-      } else {
-        throw new RuntimeException("Unsupported VarType");
-      }
-    }
-
-  }
-
-
-  /**
-   * Load from resource.
-   *
-   * @throws ConfigurationException
-   */
-  public static ZeppelinConfiguration create() {
-    if (conf != null) {
-      return conf;
-    }
-
-    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
-    URL url;
-
-    url = ZeppelinConfiguration.class.getResource(ZEPPELIN_SITE_XML);
-    if (url == null) {
-      ClassLoader cl = ZeppelinConfiguration.class.getClassLoader();
-      if (cl != null) {
-        url = cl.getResource(ZEPPELIN_SITE_XML);
-      }
-    }
-    if (url == null) {
-      url = classLoader.getResource(ZEPPELIN_SITE_XML);
-    }
-
-    if (url == null) {
-      LOG.warn("Failed to load configuration, proceeding with a default");
-      conf = new ZeppelinConfiguration();
-    } else {
-      try {
-        LOG.info("Load configuration from " + url);
-        conf = new ZeppelinConfiguration(url);
-      } catch (ConfigurationException e) {
-        LOG.warn("Failed to load configuration from " + url + " proceeding with a default", e);
-        conf = new ZeppelinConfiguration();
-      }
-    }
-
-    return conf;
-  }
-
-
-  private String getStringValue(String name, String d) {
-    List<ConfigurationNode> properties = getRootNode().getChildren();
-    if (properties == null || properties.size() == 0) {
-      return d;
-    }
-    for (ConfigurationNode p : properties) {
-      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
-          && name.equals(p.getChildren("name").get(0).getValue())) {
-        return (String) p.getChildren("value").get(0).getValue();
-      }
-    }
-    return d;
-  }
-
-  private int getIntValue(String name, int d) {
-    List<ConfigurationNode> properties = getRootNode().getChildren();
-    if (properties == null || properties.size() == 0) {
-      return d;
-    }
-    for (ConfigurationNode p : properties) {
-      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
-          && name.equals(p.getChildren("name").get(0).getValue())) {
-        return Integer.parseInt((String) p.getChildren("value").get(0).getValue());
-      }
-    }
-    return d;
-  }
-
-  private long getLongValue(String name, long d) {
-    List<ConfigurationNode> properties = getRootNode().getChildren();
-    if (properties == null || properties.size() == 0) {
-      return d;
-    }
-    for (ConfigurationNode p : properties) {
-      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
-          && name.equals(p.getChildren("name").get(0).getValue())) {
-        return Long.parseLong((String) p.getChildren("value").get(0).getValue());
-      }
-    }
-    return d;
-  }
-
-  private float getFloatValue(String name, float d) {
-    List<ConfigurationNode> properties = getRootNode().getChildren();
-    if (properties == null || properties.size() == 0) {
-      return d;
-    }
-    for (ConfigurationNode p : properties) {
-      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
-          && name.equals(p.getChildren("name").get(0).getValue())) {
-        return Float.parseFloat((String) p.getChildren("value").get(0).getValue());
-      }
-    }
-    return d;
-  }
-
-  private boolean getBooleanValue(String name, boolean d) {
-    List<ConfigurationNode> properties = getRootNode().getChildren();
-    if (properties == null || properties.size() == 0) {
-      return d;
-    }
-    for (ConfigurationNode p : properties) {
-      if (p.getChildren("name") != null && p.getChildren("name").size() > 0
-          && name.equals(p.getChildren("name").get(0).getValue())) {
-        return Boolean.parseBoolean((String) p.getChildren("value").get(0).getValue());
-      }
-    }
-    return d;
-  }
-
-  public String getString(ConfVars c) {
-    return getString(c.name(), c.getVarName(), c.getStringValue());
-  }
-
-  public String getString(String envName, String propertyName, String defaultValue) {
-    if (System.getenv(envName) != null) {
-      return System.getenv(envName);
-    }
-    if (System.getProperty(propertyName) != null) {
-      return System.getProperty(propertyName);
-    }
-
-    return getStringValue(propertyName, defaultValue);
-  }
-
-  public int getInt(ConfVars c) {
-    return getInt(c.name(), c.getVarName(), c.getIntValue());
-  }
-
-  public int getInt(String envName, String propertyName, int defaultValue) {
-    if (System.getenv(envName) != null) {
-      return Integer.parseInt(System.getenv(envName));
-    }
-
-    if (System.getProperty(propertyName) != null) {
-      return Integer.parseInt(System.getProperty(propertyName));
-    }
-    return getIntValue(propertyName, defaultValue);
-  }
-
-  public long getLong(ConfVars c) {
-    return getLong(c.name(), c.getVarName(), c.getLongValue());
-  }
-
-  public long getLong(String envName, String propertyName, long defaultValue) {
-    if (System.getenv(envName) != null) {
-      return Long.parseLong(System.getenv(envName));
-    }
-
-    if (System.getProperty(propertyName) != null) {
-      return Long.parseLong(System.getProperty(propertyName));
-    }
-    return getLongValue(propertyName, defaultValue);
-  }
-
-  public float getFloat(ConfVars c) {
-    return getFloat(c.name(), c.getVarName(), c.getFloatValue());
-  }
-
-  public float getFloat(String envName, String propertyName, float defaultValue) {
-    if (System.getenv(envName) != null) {
-      return Float.parseFloat(System.getenv(envName));
-    }
-    if (System.getProperty(propertyName) != null) {
-      return Float.parseFloat(System.getProperty(propertyName));
-    }
-    return getFloatValue(propertyName, defaultValue);
-  }
-
-  public boolean getBoolean(ConfVars c) {
-    return getBoolean(c.name(), c.getVarName(), c.getBooleanValue());
-  }
-
-  public boolean getBoolean(String envName, String propertyName, boolean defaultValue) {
-    if (System.getenv(envName) != null) {
-      return Boolean.parseBoolean(System.getenv(envName));
-    }
-
-    if (System.getProperty(propertyName) != null) {
-      return Boolean.parseBoolean(System.getProperty(propertyName));
-    }
-    return getBooleanValue(propertyName, defaultValue);
-  }
-
-  public boolean useSsl() {
-    return getBoolean(ConfVars.ZEPPELIN_SSL);
-  }
-
-  public boolean useClientAuth() {
-    return getBoolean(ConfVars.ZEPPELIN_SSL_CLIENT_AUTH);
-  }
-
-  public int getServerPort() {
-    return getInt(ConfVars.ZEPPELIN_PORT);
-  }
-
-  public int getWebSocketPort() {
-    int port = getInt(ConfVars.ZEPPELIN_WEBSOCKET_PORT);
-    if (port < 0) {
-      return getServerPort() + 1;
-    } else {
-      return port;
-    }
-  }
-
-  public String getKeyStorePath() {
-    return getRelativeDir(ConfVars.ZEPPELIN_SSL_KEYSTORE_PATH);
-  }
-
-  public String getKeyStoreType() {
-    return getString(ConfVars.ZEPPELIN_SSL_KEYSTORE_TYPE);
-  }
-
-  public String getKeyStorePassword() {
-    return getString(ConfVars.ZEPPELIN_SSL_KEYSTORE_PASSWORD);
-  }
-
-  public String getKeyManagerPassword() {
-    String password = getString(ConfVars.ZEPPELIN_SSL_KEY_MANAGER_PASSWORD);
-    if (password == null) {
-      return getKeyStorePassword();
-    } else {
-      return password;
-    }
-  }
-
-  public String getTrustStorePath() {
-    String path = getString(ConfVars.ZEPPELIN_SSL_TRUSTSTORE_PATH);
-    if (path == null) {
-      return getKeyStorePath();
-    } else {
-      return getRelativeDir(path);
-    }
-  }
-
-  public String getTrustStoreType() {
-    String type = getString(ConfVars.ZEPPELIN_SSL_TRUSTSTORE_TYPE);
-    if (type == null) {
-      return getKeyStoreType();
-    } else {
-      return type;
-    }
-  }
-
-  public String getTrustStorePassword() {
-    String password = getString(ConfVars.ZEPPELIN_SSL_TRUSTSTORE_PASSWORD);
-    if (password == null) {
-      return getKeyStorePassword();
-    } else {
-      return password;
-    }
-  }
-
-  public String getNotebookDir() {
-    return getRelativeDir(ConfVars.ZEPPELIN_NOTEBOOK_DIR);
-  }
-
-  public String getInterpreterDir() {
-    return getRelativeDir(ConfVars.ZEPPELIN_INTERPRETER_DIR);
-  }
-
-  public String getInterpreterSettingPath() {
-    return getRelativeDir("conf/interpreter.json");
-  }
-
-  public String getInterpreterRemoteRunnerPath() {
-    return getRelativeDir(ConfVars.ZEPPELIN_INTERPRETER_REMOTE_RUNNER);
-  }
-
-  public String getRelativeDir(ConfVars c) {
-    return getRelativeDir(getString(c));
-  }
-
-  public String getRelativeDir(String path) {
-    if (path != null && path.startsWith("/")) {
-      return path;
-    } else {
-      return getString(ConfVars.ZEPPELIN_HOME) + "/" + path;
-    }
-  }
-
-
-  /**
-   * Wrapper class.
-   *
-   * @author Leemoonsoo
-   *
-   */
-  public static enum ConfVars {
-    ZEPPELIN_HOME("zeppelin.home", "../"),
-    ZEPPELIN_PORT("zeppelin.server.port", 8080),
-    // negative websocket port denotes that server port + 1 should be used
-    ZEPPELIN_WEBSOCKET_PORT("zeppelin.websocket.port", -1),
-    ZEPPELIN_SSL("zeppelin.ssl", false),
-    ZEPPELIN_SSL_CLIENT_AUTH("zeppelin.ssl.client.auth", false),
-    ZEPPELIN_SSL_KEYSTORE_PATH("zeppelin.ssl.keystore.path", "conf/keystore"),
-    ZEPPELIN_SSL_KEYSTORE_TYPE("zeppelin.ssl.keystore.type", "JKS"),
-    ZEPPELIN_SSL_KEYSTORE_PASSWORD("zeppelin.ssl.keystore.password", ""),
-    ZEPPELIN_SSL_KEY_MANAGER_PASSWORD("zeppelin.ssl.key.manager.password", null),
-    ZEPPELIN_SSL_TRUSTSTORE_PATH("zeppelin.ssl.truststore.path", null),
-    ZEPPELIN_SSL_TRUSTSTORE_TYPE("zeppelin.ssl.truststore.type", null),
-    ZEPPELIN_SSL_TRUSTSTORE_PASSWORD("zeppelin.ssl.truststore.password", null),
-    ZEPPELIN_WAR("zeppelin.war", "../zeppelin-web/src/main/webapp"),
-    ZEPPELIN_API_WAR("zeppelin.api.war", "../zeppelin-docs/src/main/swagger"),
-    ZEPPELIN_INTERPRETERS("zeppelin.interpreters", "com.nflabs.zeppelin.spark.SparkInterpreter,"
-        + "com.nflabs.zeppelin.spark.PySparkInterpreter,"
-        + "com.nflabs.zeppelin.spark.SparkSqlInterpreter,"
-        + "com.nflabs.zeppelin.spark.DepInterpreter,"
-        + "com.nflabs.zeppelin.markdown.Markdown,"
-        + "com.nflabs.zeppelin.shell.ShellInterpreter"),
-        ZEPPELIN_INTERPRETER_DIR("zeppelin.interpreter.dir", "interpreter"),
-        ZEPPELIN_ENCODING("zeppelin.encoding", "UTF-8"),
-        ZEPPELIN_NOTEBOOK_DIR("zeppelin.notebook.dir", "notebook"),
-    ZEPPELIN_INTERPRETER_REMOTE_RUNNER("zeppelin.interpreter.remoterunner", "bin/interpreter.sh"),
-    // Decides whether interpreter settings are bound automatically when a new note is created.
-    ZEPPELIN_NOTEBOOK_AUTO_INTERPRETER_BINDING("zeppelin.notebook.autoInterpreterBinding", true);
-
-    private String varName;
-    @SuppressWarnings("rawtypes")
-    private Class varClass;
-    private String stringValue;
-    private VarType type;
-    private int intValue;
-    private float floatValue;
-    private boolean booleanValue;
-    private long longValue;
-
-
-    ConfVars(String varName, String varValue) {
-      this.varName = varName;
-      this.varClass = String.class;
-      this.stringValue = varValue;
-      this.intValue = -1;
-      this.floatValue = -1;
-      this.longValue = -1;
-      this.booleanValue = false;
-      this.type = VarType.STRING;
-    }
-
-    ConfVars(String varName, int intValue) {
-      this.varName = varName;
-      this.varClass = Integer.class;
-      this.stringValue = null;
-      this.intValue = intValue;
-      this.floatValue = -1;
-      this.longValue = -1;
-      this.booleanValue = false;
-      this.type = VarType.INT;
-    }
-
-    ConfVars(String varName, long longValue) {
-      this.varName = varName;
-      this.varClass = Integer.class;
-      this.stringValue = null;
-      this.intValue = -1;
-      this.floatValue = -1;
-      this.longValue = longValue;
-      this.booleanValue = false;
-      this.type = VarType.INT;
-    }
-
-    ConfVars(String varName, float floatValue) {
-      this.varName = varName;
-      this.varClass = Float.class;
-      this.stringValue = null;
-      this.intValue = -1;
-      this.longValue = -1;
-      this.floatValue = floatValue;
-      this.booleanValue = false;
-      this.type = VarType.FLOAT;
-    }
-
-    ConfVars(String varName, boolean booleanValue) {
-      this.varName = varName;
-      this.varClass = Boolean.class;
-      this.stringValue = null;
-      this.intValue = -1;
-      this.longValue = -1;
-      this.floatValue = -1;
-      this.booleanValue = booleanValue;
-      this.type = VarType.BOOLEAN;
-    }
-
-    public String getVarName() {
-      return varName;
-    }
-
-    @SuppressWarnings("rawtypes")
-    public Class getVarClass() {
-      return varClass;
-    }
-
-    public int getIntValue() {
-      return intValue;
-    }
-
-    public long getLongValue() {
-      return longValue;
-    }
-
-    public float getFloatValue() {
-      return floatValue;
-    }
-
-    public String getStringValue() {
-      return stringValue;
-    }
-
-    public boolean getBooleanValue() {
-      return booleanValue;
-    }
-
-    public VarType getType() {
-      return type;
-    }
-
-    enum VarType {
-      STRING {
-        @Override
-        void checkType(String value) throws Exception {}
-      },
-      INT {
-        @Override
-        void checkType(String value) throws Exception {
-          Integer.valueOf(value);
-        }
-      },
-      LONG {
-        @Override
-        void checkType(String value) throws Exception {
-          Long.valueOf(value);
-        }
-      },
-      FLOAT {
-        @Override
-        void checkType(String value) throws Exception {
-          Float.valueOf(value);
-        }
-      },
-      BOOLEAN {
-        @Override
-        void checkType(String value) throws Exception {
-          Boolean.valueOf(value);
-        }
-      };
-
-      boolean isType(String value) {
-        try {
-          checkType(value);
-        } catch (Exception e) {
-          return false;
-        }
-        return true;
-      }
-
-      String typeString() {
-        return name().toUpperCase();
-      }
-
-      abstract void checkType(String value) throws Exception;
-    }
-  }
-}
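
The deleted ZeppelinConfiguration above resolves every setting in the same order:
environment variable first, then JVM system property, then the value read from
zeppelin-site.xml, then the compiled-in default. A condensed sketch of that lookup
chain (ConfigLookup and the SiteXml hook are illustrative, used here only to keep
the example self-contained):

// Illustrative sketch of the env, then system property, then site xml, then default lookup above.
class ConfigLookup {
  interface SiteXml {
    String get(String propertyName);   // returns null when the property is absent
  }

  static String resolve(String envName, String propertyName,
                        String defaultValue, SiteXml siteXml) {
    String env = System.getenv(envName);
    if (env != null) {
      return env;                      // e.g. ZEPPELIN_PORT exported in the shell
    }
    String prop = System.getProperty(propertyName);
    if (prop != null) {
      return prop;                     // e.g. -Dzeppelin.server.port=8080
    }
    String fromXml = siteXml.get(propertyName);
    return fromXml != null ? fromXml : defaultValue;
  }
}

For instance, resolve("ZEPPELIN_PORT", "zeppelin.server.port", "8080", site) prefers an
exported ZEPPELIN_PORT over a -D system property, matching the behavior of
getInt(ConfVars.ZEPPELIN_PORT) in the deleted class.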

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterFactory.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterFactory.java
deleted file mode 100644
index 5d65405..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterFactory.java
+++ /dev/null
@@ -1,596 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.OutputStreamWriter;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration;
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration.ConfVars;
-import com.nflabs.zeppelin.interpreter.Interpreter.RegisteredInterpreter;
-import com.nflabs.zeppelin.interpreter.remote.RemoteInterpreter;
-
-/**
- * Manage interpreters.
- *
- */
-public class InterpreterFactory {
-  Logger logger = LoggerFactory.getLogger(InterpreterFactory.class);
-
-  private Map<String, URLClassLoader> cleanCl = Collections
-      .synchronizedMap(new HashMap<String, URLClassLoader>());
-
-  private ZeppelinConfiguration conf;
-  String[] interpreterClassList;
-
-  private Map<String, InterpreterSetting> interpreterSettings =
-      new HashMap<String, InterpreterSetting>();
-
-  private Map<String, List<String>> interpreterBindings = new HashMap<String, List<String>>();
-
-  private Gson gson;
-
-  private InterpreterOption defaultOption;
-
-  public InterpreterFactory(ZeppelinConfiguration conf) throws InterpreterException, IOException {
-    this(conf, new InterpreterOption(true));
-  }
-
-
-  public InterpreterFactory(ZeppelinConfiguration conf, InterpreterOption defaultOption)
-      throws InterpreterException, IOException {
-    this.conf = conf;
-    this.defaultOption = defaultOption;
-    String replsConf = conf.getString(ConfVars.ZEPPELIN_INTERPRETERS);
-    interpreterClassList = replsConf.split(",");
-
-    GsonBuilder builder = new GsonBuilder();
-    builder.setPrettyPrinting();
-    builder.registerTypeAdapter(Interpreter.class, new InterpreterSerializer());
-    gson = builder.create();
-
-    init();
-  }
-
-  private void init() throws InterpreterException, IOException {
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-
-    // Load classes
-    File[] interpreterDirs = new File(conf.getInterpreterDir()).listFiles();
-    if (interpreterDirs != null) {
-      for (File path : interpreterDirs) {
-        logger.info("Reading " + path.getAbsolutePath());
-        URL[] urls = null;
-        try {
-          urls = recursiveBuildLibList(path);
-        } catch (MalformedURLException e1) {
-          logger.error("Can't load jars ", e1);
-        }
-        URLClassLoader ccl = new URLClassLoader(urls, oldcl);
-
-        for (String className : interpreterClassList) {
-          try {
-            Class.forName(className, true, ccl);
-            Set<String> keys = Interpreter.registeredInterpreters.keySet();
-            for (String intName : keys) {
-              if (className.equals(
-                  Interpreter.registeredInterpreters.get(intName).getClassName())) {
-                Interpreter.registeredInterpreters.get(intName).setPath(path.getAbsolutePath());
-                logger.info("Interpreter " + intName + " found. class=" + className);
-                cleanCl.put(path.getAbsolutePath(), ccl);
-              }
-            }
-          } catch (ClassNotFoundException e) {
-            // nothing to do
-          }
-        }
-      }
-    }
-
-    loadFromFile();
-
-    // if no interpreter settings are loaded, create default set
-    synchronized (interpreterSettings) {
-      if (interpreterSettings.size() == 0) {
-        HashMap<String, List<RegisteredInterpreter>> groupClassNameMap =
-            new HashMap<String, List<RegisteredInterpreter>>();
-
-        for (String k : Interpreter.registeredInterpreters.keySet()) {
-          RegisteredInterpreter info = Interpreter.registeredInterpreters.get(k);
-
-          if (!groupClassNameMap.containsKey(info.getGroup())) {
-            groupClassNameMap.put(info.getGroup(), new LinkedList<RegisteredInterpreter>());
-          }
-
-          groupClassNameMap.get(info.getGroup()).add(info);
-        }
-
-        for (String className : interpreterClassList) {
-          for (String groupName : groupClassNameMap.keySet()) {
-            List<RegisteredInterpreter> infos = groupClassNameMap.get(groupName);
-
-            boolean found = false;
-            Properties p = new Properties();
-            for (RegisteredInterpreter info : infos) {
-              if (found == false && info.getClassName().equals(className)) {
-                found = true;
-              }
-
-              for (String k : info.getProperties().keySet()) {
-                p.put(k, info.getProperties().get(k).getDefaultValue());
-              }
-            }
-
-            if (found) {
-              // add all interpreters in group
-              add(groupName, groupName, defaultOption, p);
-              groupClassNameMap.remove(groupName);
-              break;
-            }
-          }
-        }
-      }
-    }
-
-    for (String settingId : interpreterSettings.keySet()) {
-      InterpreterSetting setting = interpreterSettings.get(settingId);
-      logger.info("Interpreter setting group {} : id={}, name={}",
-          setting.getGroup(), settingId, setting.getName());
-      for (Interpreter interpreter : setting.getInterpreterGroup()) {
-        logger.info("  className = {}", interpreter.getClassName());
-      }
-    }
-  }
-
-  private void loadFromFile() throws IOException {
-    GsonBuilder builder = new GsonBuilder();
-    builder.setPrettyPrinting();
-    builder.registerTypeAdapter(Interpreter.class, new InterpreterSerializer());
-    Gson gson = builder.create();
-
-    File settingFile = new File(conf.getInterpreterSettingPath());
-    if (!settingFile.exists()) {
-      // nothing to read
-      return;
-    }
-    FileInputStream fis = new FileInputStream(settingFile);
-    InputStreamReader isr = new InputStreamReader(fis);
-    BufferedReader bufferedReader = new BufferedReader(isr);
-    StringBuilder sb = new StringBuilder();
-    String line;
-    while ((line = bufferedReader.readLine()) != null) {
-      sb.append(line);
-    }
-    isr.close();
-    fis.close();
-
-    String json = sb.toString();
-    InterpreterInfoSaving info = gson.fromJson(json, InterpreterInfoSaving.class);
-
-    for (String k : info.interpreterSettings.keySet()) {
-      InterpreterSetting setting = info.interpreterSettings.get(k);
-
-      // Always use a separate interpreter process.
-      // Since this feature is now always on (without an enable/disable
-      // option in the GUI), settings created before the change are
-      // switched to remote mode here.
-      setting.getOption().setRemote(true);
-
-      InterpreterGroup interpreterGroup = createInterpreterGroup(
-          setting.getGroup(),
-          setting.getOption(),
-          setting.getProperties());
-
-      InterpreterSetting intpSetting = new InterpreterSetting(
-          setting.id(),
-          setting.getName(),
-          setting.getGroup(),
-          setting.getOption(),
-          interpreterGroup);
-
-      interpreterSettings.put(k, intpSetting);
-    }
-
-    this.interpreterBindings = info.interpreterBindings;
-  }
-
-
-  private void saveToFile() throws IOException {
-    String jsonString;
-
-    synchronized (interpreterSettings) {
-      InterpreterInfoSaving info = new InterpreterInfoSaving();
-      info.interpreterBindings = interpreterBindings;
-      info.interpreterSettings = interpreterSettings;
-
-      jsonString = gson.toJson(info);
-    }
-
-    File settingFile = new File(conf.getInterpreterSettingPath());
-    if (!settingFile.exists()) {
-      settingFile.createNewFile();
-    }
-
-    FileOutputStream fos = new FileOutputStream(settingFile, false);
-    OutputStreamWriter out = new OutputStreamWriter(fos);
-    out.append(jsonString);
-    out.close();
-    fos.close();
-  }
-
-  private RegisteredInterpreter getRegisteredReplInfoFromClassName(String clsName) {
-    Set<String> keys = Interpreter.registeredInterpreters.keySet();
-    for (String intName : keys) {
-      RegisteredInterpreter info = Interpreter.registeredInterpreters.get(intName);
-      if (clsName.equals(info.getClassName())) {
-        return info;
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Return ordered interpreter setting list.
-   * The list does not contain more than one setting from the same interpreter class.
-   * Order by InterpreterClass (order defined by ZEPPELIN_INTERPRETERS), Interpreter setting name
-   * @return
-   */
-  public List<String> getDefaultInterpreterSettingList() {
-    // this list will contain default interpreter setting list
-    List<String> defaultSettings = new LinkedList<String>();
-
-    // to ignore the same interpreter group
-    Map<String, Boolean> interpreterGroupCheck = new HashMap<String, Boolean>();
-
-    List<InterpreterSetting> sortedSettings = get();
-
-    for (InterpreterSetting setting : sortedSettings) {
-      if (defaultSettings.contains(setting.id())) {
-        continue;
-      }
-
-      if (!interpreterGroupCheck.containsKey(setting.getGroup())) {
-        defaultSettings.add(setting.id());
-        interpreterGroupCheck.put(setting.getGroup(), true);
-      }
-    }
-    return defaultSettings;
-  }
-
-  public List<RegisteredInterpreter> getRegisteredInterpreterList() {
-    List<RegisteredInterpreter> registeredInterpreters = new LinkedList<RegisteredInterpreter>();
-
-    for (String className : interpreterClassList) {
-      registeredInterpreters.add(Interpreter.findRegisteredInterpreterByClassName(className));
-    }
-
-    return registeredInterpreters;
-  }
-
-  /**
-   * @param name user defined name
-   * @param groupName interpreter group name to instantiate
-   * @param option interpreter option
-   * @param properties interpreter properties
-   * @return the newly created InterpreterGroup
-   * @throws InterpreterException
-   * @throws IOException
-   */
-  public InterpreterGroup add(String name, String groupName,
-      InterpreterOption option, Properties properties)
-      throws InterpreterException, IOException {
-    synchronized (interpreterSettings) {
-      InterpreterGroup interpreterGroup = createInterpreterGroup(groupName, option, properties);
-
-      InterpreterSetting intpSetting = new InterpreterSetting(
-          name,
-          groupName,
-          option,
-          interpreterGroup);
-      interpreterSettings.put(intpSetting.id(), intpSetting);
-
-      saveToFile();
-      return interpreterGroup;
-    }
-  }
-
-  private InterpreterGroup createInterpreterGroup(String groupName,
-      InterpreterOption option,
-      Properties properties)
-      throws InterpreterException {
-    InterpreterGroup interpreterGroup = new InterpreterGroup();
-
-    for (String className : interpreterClassList) {
-      Set<String> keys = Interpreter.registeredInterpreters.keySet();
-      for (String intName : keys) {
-        RegisteredInterpreter info = Interpreter.registeredInterpreters
-            .get(intName);
-        if (info.getClassName().equals(className)
-            && info.getGroup().equals(groupName)) {
-          Interpreter intp;
-
-          if (option.isRemote()) {
-            intp = createRemoteRepl(info.getPath(),
-                info.getClassName(),
-                properties);
-          } else {
-            intp = createRepl(info.getPath(),
-                info.getClassName(),
-                properties);
-          }
-          interpreterGroup.add(intp);
-          intp.setInterpreterGroup(interpreterGroup);
-          break;
-        }
-      }
-    }
-    return interpreterGroup;
-  }
-
-  public void remove(String id) throws IOException {
-    synchronized (interpreterSettings) {
-      if (interpreterSettings.containsKey(id)) {
-        InterpreterSetting intp = interpreterSettings.get(id);
-        intp.getInterpreterGroup().close();
-        intp.getInterpreterGroup().destroy();
-
-        interpreterSettings.remove(id);
-        for (List<String> settings : interpreterBindings.values()) {
-          Iterator<String> it = settings.iterator();
-          while (it.hasNext()) {
-            String settingId = it.next();
-            if (settingId.equals(id)) {
-              it.remove();
-            }
-          }
-        }
-        saveToFile();
-      }
-    }
-  }
-
-  /**
-   * Get loaded interpreter settings.
-   * @return interpreter settings ordered by interpreter class, then by setting name
-   */
-  public List<InterpreterSetting> get() {
-    synchronized (interpreterSettings) {
-      List<InterpreterSetting> orderedSettings = new LinkedList<InterpreterSetting>();
-      List<InterpreterSetting> settings = new LinkedList<InterpreterSetting>(
-          interpreterSettings.values());
-      Collections.sort(settings, new Comparator<InterpreterSetting>(){
-        @Override
-        public int compare(InterpreterSetting o1, InterpreterSetting o2) {
-          return o1.getName().compareTo(o2.getName());
-        }
-      });
-
-      for (String className : interpreterClassList) {
-        for (InterpreterSetting setting : settings) {
-          for (Interpreter intp : setting.getInterpreterGroup()) {
-            if (className.equals(intp.getClassName())) {
-              boolean alreadyAdded = false;
-              for (InterpreterSetting st : orderedSettings) {
-                if (setting.id().equals(st.id())) {
-                  alreadyAdded = true;
-                }
-              }
-              if (alreadyAdded == false) {
-                orderedSettings.add(setting);
-              }
-            }
-          }
-        }
-      }
-      return orderedSettings;
-    }
-  }
-
-  public InterpreterSetting get(String name) {
-    synchronized (interpreterSettings) {
-      return interpreterSettings.get(name);
-    }
-  }
-
-  public void putNoteInterpreterSettingBinding(String noteId,
-      List<String> settingList) throws IOException {
-    synchronized (interpreterSettings) {
-      interpreterBindings.put(noteId, settingList);
-      saveToFile();
-    }
-  }
-
-  public void removeNoteInterpreterSettingBinding(String noteId) {
-    synchronized (interpreterSettings) {
-      interpreterBindings.remove(noteId);
-    }
-  }
-
-  public List<String> getNoteInterpreterSettingBinding(String noteId) {
-    LinkedList<String> bindings = new LinkedList<String>();
-    synchronized (interpreterSettings) {
-      List<String> settingIds = interpreterBindings.get(noteId);
-      if (settingIds != null) {
-        bindings.addAll(settingIds);
-      }
-    }
-    return bindings;
-  }
-
-  /**
-   * Change interpreter properties and restart the interpreter group.
-   * @param id interpreter setting id
-   * @param option interpreter option
-   * @param properties new interpreter properties
-   * @throws IOException
-   */
-  public void setPropertyAndRestart(String id, InterpreterOption option,
-      Properties properties) throws IOException {
-    synchronized (interpreterSettings) {
-      InterpreterSetting intpsetting = interpreterSettings.get(id);
-      if (intpsetting != null) {
-        intpsetting.getInterpreterGroup().close();
-        intpsetting.getInterpreterGroup().destroy();
-
-        intpsetting.setOption(option);
-
-        InterpreterGroup interpreterGroup = createInterpreterGroup(
-            intpsetting.getGroup(), option, properties);
-        intpsetting.setInterpreterGroup(interpreterGroup);
-        saveToFile();
-      } else {
-        throw new InterpreterException("Interpreter setting id " + id
-            + " not found");
-      }
-    }
-  }
-
-  public void restart(String id) {
-    synchronized (interpreterSettings) {
-      InterpreterSetting intpsetting = interpreterSettings.get(id);
-      if (intpsetting != null) {
-        intpsetting.getInterpreterGroup().close();
-        intpsetting.getInterpreterGroup().destroy();
-
-        InterpreterGroup interpreterGroup = createInterpreterGroup(
-            intpsetting.getGroup(), intpsetting.getOption(), intpsetting.getProperties());
-        intpsetting.setInterpreterGroup(interpreterGroup);
-      } else {
-        throw new InterpreterException("Interpreter setting id " + id
-            + " not found");
-      }
-    }
-  }
-
-
-  public void close() {
-    synchronized (interpreterSettings) {
-      Collection<InterpreterSetting> intpsettings = interpreterSettings.values();
-      for (InterpreterSetting intpsetting : intpsettings) {
-        intpsetting.getInterpreterGroup().close();
-        intpsetting.getInterpreterGroup().destroy();
-      }
-    }
-  }
-
-  private Interpreter createRepl(String dirName, String className,
-      Properties property)
-      throws InterpreterException {
-    logger.info("Create repl {} from {}", className, dirName);
-
-    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
-    try {
-
-      URLClassLoader ccl = cleanCl.get(dirName);
-      if (ccl == null) {
-        // classloader fallback
-        ccl = URLClassLoader.newInstance(new URL[] {}, oldcl);
-      }
-
-      boolean separateCL = true;
-      try { // check whether the server's classloader can already load the class.
-        Class.forName(className);
-        separateCL = false;
-      } catch (Exception e) {
-        // class is not visible from the server's classloader; keep a separate one.
-      }
-
-      URLClassLoader cl;
-
-      if (separateCL == true) {
-        cl = URLClassLoader.newInstance(new URL[] {}, ccl);
-      } else {
-        cl = ccl;
-      }
-      Thread.currentThread().setContextClassLoader(cl);
-
-      Class<Interpreter> replClass = (Class<Interpreter>) cl.loadClass(className);
-      Constructor<Interpreter> constructor =
-          replClass.getConstructor(new Class[] {Properties.class});
-      Interpreter repl = constructor.newInstance(property);
-      repl.setClassloaderUrls(ccl.getURLs());
-      LazyOpenInterpreter intp = new LazyOpenInterpreter(
-          new ClassloaderInterpreter(repl, cl));
-      return intp;
-    } catch (SecurityException e) {
-      throw new InterpreterException(e);
-    } catch (NoSuchMethodException e) {
-      throw new InterpreterException(e);
-    } catch (IllegalArgumentException e) {
-      throw new InterpreterException(e);
-    } catch (InstantiationException e) {
-      throw new InterpreterException(e);
-    } catch (IllegalAccessException e) {
-      throw new InterpreterException(e);
-    } catch (InvocationTargetException e) {
-      throw new InterpreterException(e);
-    } catch (ClassNotFoundException e) {
-      throw new InterpreterException(e);
-    } finally {
-      Thread.currentThread().setContextClassLoader(oldcl);
-    }
-  }
-
-
-  private Interpreter createRemoteRepl(String interpreterPath, String className,
-      Properties property) {
-
-    LazyOpenInterpreter intp = new LazyOpenInterpreter(new RemoteInterpreter(
-        property, className, conf.getInterpreterRemoteRunnerPath(), interpreterPath));
-    return intp;
-  }
-
-
-  private URL[] recursiveBuildLibList(File path) throws MalformedURLException {
-    URL[] urls = new URL[0];
-    if (path == null || path.exists() == false) {
-      return urls;
-    } else if (path.getName().startsWith(".")) {
-      return urls;
-    } else if (path.isDirectory()) {
-      File[] files = path.listFiles();
-      if (files != null) {
-        for (File f : files) {
-          urls = (URL[]) ArrayUtils.addAll(urls, recursiveBuildLibList(f));
-        }
-      }
-      return urls;
-    } else {
-      return new URL[] {path.toURI().toURL()};
-    }
-  }
-}
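
The loadFromFile()/saveToFile() pair above keeps the whole factory state in one JSON file: a holder object carrying the settings map and the note bindings is serialized with Gson on every change and read back in a single pass at startup. Below is a minimal, self-contained sketch of that round-trip pattern; the Holder class, its field, and the file name are illustrative stand-ins, not Zeppelin's API.

    import com.google.gson.Gson;
    import com.google.gson.GsonBuilder;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.HashMap;
    import java.util.Map;

    public class SettingsRoundTrip {
      // Hypothetical holder, similar in shape to InterpreterInfoSaving.
      static class Holder {
        Map<String, String> settings = new HashMap<String, String>();
      }

      public static void main(String[] args) throws Exception {
        Gson gson = new GsonBuilder().setPrettyPrinting().create();

        Holder out = new Holder();
        out.settings.put("spark", "remote=true");

        // Save: write the whole state as one JSON document, replacing the previous file.
        Path file = Paths.get("interpreter-settings.json");
        Files.write(file, gson.toJson(out).getBytes(StandardCharsets.UTF_8));

        // Load: read the file back and rebuild the in-memory map in one call.
        String json = new String(Files.readAllBytes(file), StandardCharsets.UTF_8);
        Holder in = gson.fromJson(json, Holder.class);
        System.out.println(in.settings);
      }
    }

Reading the bytes in one call plays the same role as the BufferedReader loop in loadFromFile(); since the payload is JSON, the dropped line breaks do not matter either way.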

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterInfoSaving.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterInfoSaving.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterInfoSaving.java
deleted file mode 100644
index 02335ae..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterInfoSaving.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Holder for interpreter settings and note bindings persisted to disk.
- */
-public class InterpreterInfoSaving {
-  public Map<String, InterpreterSetting> interpreterSettings;
-  public Map<String, List<String>> interpreterBindings;
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterOption.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterOption.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterOption.java
deleted file mode 100644
index 614cc79..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterOption.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-/**
- * Interpreter options, such as whether to run the interpreter in a remote process.
- */
-public class InterpreterOption {
-  boolean remote;
-
-  public InterpreterOption() {
-    remote = false;
-  }
-
-  public InterpreterOption(boolean remote) {
-    this.remote = remote;
-  }
-
-  public boolean isRemote() {
-    return remote;
-  }
-
-  public void setRemote(boolean remote) {
-    this.remote = remote;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterSerializer.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterSerializer.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterSerializer.java
deleted file mode 100644
index 4d70ccd..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterSerializer.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.lang.reflect.Type;
-
-import com.google.gson.JsonDeserializationContext;
-import com.google.gson.JsonDeserializer;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParseException;
-import com.google.gson.JsonSerializationContext;
-import com.google.gson.JsonSerializer;
-
-
-/**
- * Interpreter class serializer for gson
- *
- */
-public class InterpreterSerializer implements JsonSerializer<Interpreter>,
-  JsonDeserializer<Interpreter> {
-
-  @Override
-  public JsonElement serialize(Interpreter interpreter, Type type,
-      JsonSerializationContext context) {
-    JsonObject json = new JsonObject();
-    json.addProperty("class", interpreter.getClassName());
-    json.addProperty(
-        "name",
-        Interpreter.findRegisteredInterpreterByClassName(
-            interpreter.getClassName()).getName());
-    return json;
-  }
-
-  @Override
-  public Interpreter deserialize(JsonElement json, Type typeOfT,
-      JsonDeserializationContext context) throws JsonParseException {
-    return null;
-  }
-
-}
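
InterpreterSerializer above writes only a summary of an Interpreter (its class name and registered name) and deliberately deserializes to null; live interpreters are rebuilt from the saved settings by the factory instead. Here is a small sketch of the same Gson custom-serializer pattern, using a hypothetical Widget type in place of Interpreter:

    import com.google.gson.Gson;
    import com.google.gson.GsonBuilder;
    import com.google.gson.JsonElement;
    import com.google.gson.JsonObject;
    import com.google.gson.JsonSerializationContext;
    import com.google.gson.JsonSerializer;
    import java.lang.reflect.Type;

    public class WidgetSerializerDemo {
      // Hypothetical type standing in for Interpreter.
      static class Widget {
        String className() { return Widget.class.getName(); }
      }

      // Emit only a summary of the object, the way InterpreterSerializer does.
      static class WidgetSerializer implements JsonSerializer<Widget> {
        @Override
        public JsonElement serialize(Widget w, Type type, JsonSerializationContext ctx) {
          JsonObject json = new JsonObject();
          json.addProperty("class", w.className());
          return json;
        }
      }

      public static void main(String[] args) {
        Gson gson = new GsonBuilder()
            .registerTypeAdapter(Widget.class, new WidgetSerializer())
            .create();
        System.out.println(gson.toJson(new Widget())); // {"class":"WidgetSerializerDemo$Widget"}
      }
    }

Registering the adapter with registerTypeAdapter, as the factory does for Interpreter.class, keeps heavyweight runtime objects out of the persisted JSON.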

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterSetting.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterSetting.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterSetting.java
deleted file mode 100644
index 3d0a017..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/interpreter/InterpreterSetting.java
+++ /dev/null
@@ -1,91 +0,0 @@
-package com.nflabs.zeppelin.interpreter;
-
-import java.util.Properties;
-import java.util.Random;
-
-import com.nflabs.zeppelin.notebook.utility.IdHashes;
-
-/**
- * Interpreter settings
- */
-public class InterpreterSetting {
-  private String id;
-  private String name;
-  private String group;
-  private String description;
-  private Properties properties;
-  private InterpreterGroup interpreterGroup;
-  private InterpreterOption option;
-
-  public InterpreterSetting(String id, String name,
-      String group,
-      InterpreterOption option,
-      InterpreterGroup interpreterGroup) {
-    this.id = id;
-    this.name = name;
-    this.group = group;
-    this.properties = interpreterGroup.getProperty();
-    this.option = option;
-    this.interpreterGroup = interpreterGroup;
-  }
-
-  public InterpreterSetting(String name,
-      String group,
-      InterpreterOption option,
-      InterpreterGroup interpreterGroup) {
-    this(generateId(), name, group, option, interpreterGroup);
-  }
-
-  public String id() {
-    return id;
-  }
-
-  private static String generateId() {
-    return IdHashes.encode(System.currentTimeMillis() + new Random().nextInt());
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public String getDescription() {
-    return description;
-  }
-
-  public void setDescription(String desc) {
-    this.description = desc;
-  }
-
-  public String getGroup() {
-    return group;
-  }
-
-  public InterpreterGroup getInterpreterGroup() {
-    return interpreterGroup;
-  }
-
-  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
-    this.interpreterGroup = interpreterGroup;
-    this.properties = interpreterGroup.getProperty();
-  }
-
-  public Properties getProperties() {
-    return properties;
-  }
-
-  public InterpreterOption getOption() {
-    if (option == null) {
-      option = new InterpreterOption();
-    }
-
-    return option;
-  }
-
-  public void setOption(InterpreterOption option) {
-    this.option = option;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/JobListenerFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/JobListenerFactory.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/JobListenerFactory.java
deleted file mode 100644
index ef69b2a..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/JobListenerFactory.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package com.nflabs.zeppelin.notebook;
-
-import com.nflabs.zeppelin.scheduler.JobListener;
-
-/**
- * TODO(moon): provide description.
- * 
- * @author Leemoonsoo
- *
- */
-public interface JobListenerFactory {
-  public JobListener getParagraphJobListener(Note note);
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Note.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Note.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Note.java
deleted file mode 100644
index df0a91d..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Note.java
+++ /dev/null
@@ -1,350 +0,0 @@
-package com.nflabs.zeppelin.notebook;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration;
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration.ConfVars;
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.notebook.utility.IdHashes;
-import com.nflabs.zeppelin.scheduler.Job;
-import com.nflabs.zeppelin.scheduler.Job.Status;
-import com.nflabs.zeppelin.scheduler.JobListener;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-
-/**
- * Bound interpreters for a note.
- */
-public class Note implements Serializable, JobListener {
-  transient Logger logger = LoggerFactory.getLogger(Note.class);
-  List<Paragraph> paragraphs = new LinkedList<Paragraph>();
-  private String name;
-  private String id;
-
-  private transient NoteInterpreterLoader replLoader;
-  private transient ZeppelinConfiguration conf;
-  private transient JobListenerFactory jobListenerFactory;
-
-  /**
-   * note configurations.
-   *
-   * - looknfeel
-   * - cron
-   */
-  private Map<String, Object> config = new HashMap<String, Object>();
-
-  /**
-   * note information.
-   *
-   * - cron : cron expression validity.
-   */
-  private Map<String, Object> info = new HashMap<String, Object>();
-
-  public Note() {}
-
-  public Note(ZeppelinConfiguration conf, NoteInterpreterLoader replLoader,
-      JobListenerFactory jobListenerFactory, org.quartz.Scheduler quartzSched) {
-    this.conf = conf;
-    this.replLoader = replLoader;
-    this.jobListenerFactory = jobListenerFactory;
-    generateId();
-  }
-
-  private void generateId() {
-    id = IdHashes.encode(System.currentTimeMillis() + new Random().nextInt());
-  }
-
-  public String id() {
-    return id;
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public NoteInterpreterLoader getNoteReplLoader() {
-    return replLoader;
-  }
-
-  public void setReplLoader(NoteInterpreterLoader replLoader) {
-    this.replLoader = replLoader;
-  }
-
-  public void setZeppelinConfiguration(ZeppelinConfiguration conf) {
-    this.conf = conf;
-  }
-
-  /**
-   * Add a paragraph at the end of the note.
-   *
-   * @return the newly added paragraph
-   */
-  public Paragraph addParagraph() {
-    Paragraph p = new Paragraph(this, replLoader);
-    synchronized (paragraphs) {
-      paragraphs.add(p);
-    }
-    return p;
-  }
-
-  /**
-   * Insert a paragraph at the given index.
-   *
-   * @param index position to insert at
-   * @return the newly inserted paragraph
-   */
-  public Paragraph insertParagraph(int index) {
-    Paragraph p = new Paragraph(this, replLoader);
-    synchronized (paragraphs) {
-      paragraphs.add(index, p);
-    }
-    return p;
-  }
-
-  /**
-   * Remove paragraph by id.
-   *
-   * @param paragraphId
-   * @return the removed paragraph, or null if no paragraph has that id
-   */
-  public Paragraph removeParagraph(String paragraphId) {
-    synchronized (paragraphs) {
-      for (int i = 0; i < paragraphs.size(); i++) {
-        Paragraph p = paragraphs.get(i);
-        if (p.getId().equals(paragraphId)) {
-          paragraphs.remove(i);
-          return p;
-        }
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Move paragraph into the new index (order from 0 ~ n-1).
-   *
-   * @param paragraphId
-   * @param index new index
-   */
-  public void moveParagraph(String paragraphId, int index) {
-    synchronized (paragraphs) {
-      int oldIndex = -1;
-      Paragraph p = null;
-
-      if (index < 0 || index >= paragraphs.size()) {
-        return;
-      }
-
-      for (int i = 0; i < paragraphs.size(); i++) {
-        if (paragraphs.get(i).getId().equals(paragraphId)) {
-          oldIndex = i;
-          if (oldIndex == index) {
-            return;
-          }
-          p = paragraphs.remove(i);
-        }
-      }
-
-      if (p == null) {
-        return;
-      }
-
-      paragraphs.add(index, p);
-    }
-  }
-
-  public boolean isLastParagraph(String paragraphId) {
-    if (!paragraphs.isEmpty()) {
-      synchronized (paragraphs) {
-        if (paragraphId.equals(paragraphs.get(paragraphs.size() - 1).getId())) {
-          return true;
-        }
-      }
-      return false;
-    }
-    // the note has no paragraphs, so there is nothing after the given one
-    return true;
-  }
-
-  public Paragraph getParagraph(String paragraphId) {
-    synchronized (paragraphs) {
-      for (Paragraph p : paragraphs) {
-        if (p.getId().equals(paragraphId)) {
-          return p;
-        }
-      }
-    }
-    return null;
-  }
-
-  public Paragraph getLastParagraph() {
-    synchronized (paragraphs) {
-      return paragraphs.get(paragraphs.size() - 1);
-    }
-  }
-
-  /**
-   * Run all paragraphs sequentially.
-   */
-  public void runAll() {
-    synchronized (paragraphs) {
-      for (Paragraph p : paragraphs) {
-        p.setNoteReplLoader(replLoader);
-        p.setListener(jobListenerFactory.getParagraphJobListener(this));
-        Interpreter intp = replLoader.get(p.getRequiredReplName());
-        intp.getScheduler().submit(p);
-      }
-    }
-  }
-
-  /**
-   * Run a single paragraph.
-   *
-   * @param paragraphId
-   */
-  public void run(String paragraphId) {
-    Paragraph p = getParagraph(paragraphId);
-    p.setNoteReplLoader(replLoader);
-    p.setListener(jobListenerFactory.getParagraphJobListener(this));
-    Interpreter intp = replLoader.get(p.getRequiredReplName());
-    if (intp == null) {
-      throw new InterpreterException("Interpreter " + p.getRequiredReplName() + " not found");
-    }
-    intp.getScheduler().submit(p);
-  }
-
-  public List<String> completion(String paragraphId, String buffer, int cursor) {
-    Paragraph p = getParagraph(paragraphId);
-    p.setNoteReplLoader(replLoader);
-    p.setListener(jobListenerFactory.getParagraphJobListener(this));
-    return p.completion(buffer, cursor);
-  }
-
-  public List<Paragraph> getParagraphs() {
-    synchronized (paragraphs) {
-      return new LinkedList<Paragraph>(paragraphs);
-    }
-  }
-
-  public void persist() throws IOException {
-    GsonBuilder gsonBuilder = new GsonBuilder();
-    gsonBuilder.setPrettyPrinting();
-    Gson gson = gsonBuilder.create();
-
-    File dir = new File(conf.getNotebookDir() + "/" + id);
-    if (!dir.exists()) {
-      dir.mkdirs();
-    } else if (dir.isFile()) {
-      throw new RuntimeException("File already exists: " + dir.toString());
-    }
-
-    File file = new File(conf.getNotebookDir() + "/" + id + "/note.json");
-    logger().info("Persist note {} into {}", id, file.getAbsolutePath());
-
-    String json = gson.toJson(this);
-    FileOutputStream out = new FileOutputStream(file);
-    out.write(json.getBytes(conf.getString(ConfVars.ZEPPELIN_ENCODING)));
-    out.close();
-  }
-
-  public void unpersist() throws IOException {
-    File dir = new File(conf.getNotebookDir() + "/" + id);
-
-    FileUtils.deleteDirectory(dir);
-  }
-
-  public static Note load(String id, ZeppelinConfiguration conf, NoteInterpreterLoader replLoader,
-      Scheduler scheduler, JobListenerFactory jobListenerFactory, org.quartz.Scheduler quartzSched)
-      throws IOException {
-    GsonBuilder gsonBuilder = new GsonBuilder();
-    gsonBuilder.setPrettyPrinting();
-    Gson gson = gsonBuilder.create();
-
-    File file = new File(conf.getNotebookDir() + "/" + id + "/note.json");
-    logger().info("Load note {} from {}", id, file.getAbsolutePath());
-
-    if (!file.isFile()) {
-      return null;
-    }
-
-    FileInputStream ins = new FileInputStream(file);
-    String json = IOUtils.toString(ins, conf.getString(ConfVars.ZEPPELIN_ENCODING));
-    Note note = gson.fromJson(json, Note.class);
-    note.setZeppelinConfiguration(conf);
-    note.setReplLoader(replLoader);
-    note.jobListenerFactory = jobListenerFactory;
-    for (Paragraph p : note.paragraphs) {
-      if (p.getStatus() == Status.PENDING || p.getStatus() == Status.RUNNING) {
-        p.setStatus(Status.ABORT);
-      }
-    }
-
-    return note;
-  }
-
-  public Map<String, Object> getConfig() {
-    if (config == null) {
-      config = new HashMap<String, Object>();
-    }
-    return config;
-  }
-
-  public void setConfig(Map<String, Object> config) {
-    this.config = config;
-  }
-
-  public Map<String, Object> getInfo() {
-    if (info == null) {
-      info = new HashMap<String, Object>();
-    }
-    return info;
-  }
-
-  public void setInfo(Map<String, Object> info) {
-    this.info = info;
-  }
-
-  @Override
-  public void beforeStatusChange(Job job, Status before, Status after) {
-    Paragraph p = (Paragraph) job;
-  }
-
-  @Override
-  public void afterStatusChange(Job job, Status before, Status after) {
-    Paragraph p = (Paragraph) job;
-  }
-
-  private static Logger logger() {
-    Logger logger = LoggerFactory.getLogger(Note.class);
-    return logger;
-  }
-
-  @Override
-  public void onProgressUpdate(Job job, int progress) {}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/NoteInterpreterLoader.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/NoteInterpreterLoader.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/NoteInterpreterLoader.java
deleted file mode 100644
index ba570f6..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/NoteInterpreterLoader.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package com.nflabs.zeppelin.notebook;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import com.nflabs.zeppelin.interpreter.InterpreterFactory;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterSetting;
-
-/**
- * Repl loader per note.
- */
-public class NoteInterpreterLoader {
-  private transient InterpreterFactory factory;
-  String noteId;
-
-  public NoteInterpreterLoader(InterpreterFactory factory) {
-    this.factory = factory;
-  }
-
-  public void setNoteId(String noteId) {
-    this.noteId = noteId;
-  }
-
-  /**
-   * set interpreter ids
-   * @param ids InterpreterSetting id list
-   * @throws IOException 
-   */
-  public void setInterpreters(List<String> ids) throws IOException {
-    factory.putNoteInterpreterSettingBinding(noteId, ids);
-  }
-  
-  public List<String> getInterpreters() {
-    return factory.getNoteInterpreterSettingBinding(noteId);
-  }
-  
-  public List<InterpreterSetting> getInterpreterSettings() {
-    List<String> interpreterSettingIds = factory.getNoteInterpreterSettingBinding(noteId);
-    LinkedList<InterpreterSetting> settings = new LinkedList<InterpreterSetting>();
-    synchronized (interpreterSettingIds) {
-      for (String id : interpreterSettingIds) {
-        InterpreterSetting setting = factory.get(id);
-        if (setting == null) {
-          // interpreter setting is removed from factory. remove id from here, too
-          interpreterSettingIds.remove(id);
-        } else {
-          settings.add(setting);
-        }
-      }
-    }
-    return settings;
-  }
-
-  public Interpreter get(String replName) {
-    List<InterpreterSetting> settings = getInterpreterSettings();
-    
-    if (settings == null || settings.size() == 0) {
-      return null;
-    }
-    
-    if (replName == null) {
-      return settings.get(0).getInterpreterGroup().getFirst();
-    }
-
-    if (Interpreter.registeredInterpreters == null) {
-      return null;
-    }
-    Interpreter.RegisteredInterpreter registeredInterpreter
-      = Interpreter.registeredInterpreters.get(replName);
-    if (registeredInterpreter == null || registeredInterpreter.getClassName() == null) {
-      throw new InterpreterException(replName + " interpreter not found");
-    }
-    String interpreterClassName = registeredInterpreter.getClassName();
-    for (InterpreterSetting setting : settings) {
-      InterpreterGroup intpGroup = setting.getInterpreterGroup();
-      for (Interpreter interpreter : intpGroup) {
-        if (interpreterClassName.equals(interpreter.getClassName())) {
-          return interpreter;
-        }
-      }
-    }
-    
-    return null;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Notebook.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Notebook.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Notebook.java
deleted file mode 100644
index fb92964..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Notebook.java
+++ /dev/null
@@ -1,283 +0,0 @@
-package com.nflabs.zeppelin.notebook;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import org.quartz.CronScheduleBuilder;
-import org.quartz.CronTrigger;
-import org.quartz.JobBuilder;
-import org.quartz.JobDetail;
-import org.quartz.JobExecutionContext;
-import org.quartz.JobExecutionException;
-import org.quartz.JobKey;
-import org.quartz.SchedulerException;
-import org.quartz.TriggerBuilder;
-import org.quartz.impl.StdSchedulerFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration;
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration.ConfVars;
-import com.nflabs.zeppelin.interpreter.InterpreterFactory;
-import com.nflabs.zeppelin.interpreter.InterpreterSetting;
-import com.nflabs.zeppelin.scheduler.Scheduler;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-
-/**
- * Collection of Notes.
- */
-public class Notebook {
-  Logger logger = LoggerFactory.getLogger(Notebook.class);
-  private SchedulerFactory schedulerFactory;
-  private InterpreterFactory replFactory;
-  /** Keep the order. */
-  Map<String, Note> notes = new LinkedHashMap<String, Note>();
-  private ZeppelinConfiguration conf;
-  private StdSchedulerFactory quertzSchedFact;
-  private org.quartz.Scheduler quartzSched;
-  private JobListenerFactory jobListenerFactory;
-
-  public Notebook(ZeppelinConfiguration conf, SchedulerFactory schedulerFactory,
-      InterpreterFactory replFactory, JobListenerFactory jobListenerFactory) throws IOException,
-      SchedulerException {
-    this.conf = conf;
-    this.schedulerFactory = schedulerFactory;
-    this.replFactory = replFactory;
-    this.jobListenerFactory = jobListenerFactory;
-    quertzSchedFact = new org.quartz.impl.StdSchedulerFactory();
-    quartzSched = quertzSchedFact.getScheduler();
-    quartzSched.start();
-    CronJob.notebook = this;
-
-    loadAllNotes();
-  }
-
-  /**
-   * Create new note.
-   *
-   * @return the new note
-   * @throws IOException
-   */
-  public Note createNote() throws IOException {
-    if (conf.getBoolean(ConfVars.ZEPPELIN_NOTEBOOK_AUTO_INTERPRETER_BINDING)) {
-      return createNote(replFactory.getDefaultInterpreterSettingList());
-    } else {
-      return createNote(null);
-    }
-  }
-
-  /**
-   * Create new note.
-   *
-   * @param interpreterIds interpreter setting ids to bind to the new note, or null
-   * @return the new note
-   * @throws IOException
-   */
-  public Note createNote(List<String> interpreterIds) throws IOException {
-    NoteInterpreterLoader intpLoader = new NoteInterpreterLoader(replFactory);
-    Note note = new Note(conf, intpLoader, jobListenerFactory, quartzSched);
-    intpLoader.setNoteId(note.id());
-    synchronized (notes) {
-      notes.put(note.id(), note);
-    }
-    if (interpreterIds != null) {
-      bindInterpretersToNote(note.id(), interpreterIds);
-    }
-
-    return note;
-  }
-
-  public void bindInterpretersToNote(String id,
-      List<String> interpreterSettingIds) throws IOException {
-    Note note = getNote(id);
-    if (note != null) {
-      note.getNoteReplLoader().setInterpreters(interpreterSettingIds);
-      replFactory.putNoteInterpreterSettingBinding(id, interpreterSettingIds);
-    }
-  }
-
-  public List<String> getBindedInterpreterSettingsIds(String id) {
-    Note note = getNote(id);
-    if (note != null) {
-      return note.getNoteReplLoader().getInterpreters();
-    } else {
-      return new LinkedList<String>();
-    }
-  }
-
-  public List<InterpreterSetting> getBindedInterpreterSettings(String id) {
-    Note note = getNote(id);
-    if (note != null) {
-      return note.getNoteReplLoader().getInterpreterSettings();
-    } else {
-      return new LinkedList<InterpreterSetting>();
-    }
-  }
-
-  public Note getNote(String id) {
-    synchronized (notes) {
-      return notes.get(id);
-    }
-  }
-
-  public void removeNote(String id) {
-    Note note;
-    synchronized (notes) {
-      note = notes.remove(id);
-    }
-    try {
-      note.unpersist();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  private void loadAllNotes() throws IOException {
-    File notebookDir = new File(conf.getNotebookDir());
-    File[] dirs = notebookDir.listFiles();
-    if (dirs == null) {
-      return;
-    }
-    for (File f : dirs) {
-      boolean isHidden = f.getName().startsWith(".");
-      if (f.isDirectory() && !isHidden) {
-        Scheduler scheduler =
-            schedulerFactory.createOrGetFIFOScheduler("note_" + System.currentTimeMillis());
-        logger.info("Loading note from " + f.getName());
-        NoteInterpreterLoader noteInterpreterLoader = new NoteInterpreterLoader(replFactory);
-        Note note = Note.load(f.getName(),
-            conf,
-            noteInterpreterLoader,
-            scheduler,
-            jobListenerFactory, quartzSched);
-        noteInterpreterLoader.setNoteId(note.id());
-
-        synchronized (notes) {
-          notes.put(note.id(), note);
-          refreshCron(note.id());
-        }
-      }
-    }
-  }
-
-  public List<Note> getAllNotes() {
-    synchronized (notes) {
-      List<Note> noteList = new ArrayList<Note>(notes.values());
-      logger.info("Listing {} notes", noteList.size());
-      Collections.sort(noteList, new Comparator() {
-        @Override
-        public int compare(Object one, Object two) {
-          Note note1 = (Note) one;
-          Note note2 = (Note) two;
-
-          String name1 = note1.id();
-          if (note1.getName() != null) {
-            name1 = note1.getName();
-          }
-          String name2 = note2.id();
-          if (note2.getName() != null) {
-            name2 = note2.getName();
-          }
-          return name1.compareTo(name2);
-        }
-      });
-      return noteList;
-    }
-  }
-
-  public JobListenerFactory getJobListenerFactory() {
-    return jobListenerFactory;
-  }
-
-  public void setJobListenerFactory(JobListenerFactory jobListenerFactory) {
-    this.jobListenerFactory = jobListenerFactory;
-  }
-
-  /**
-   * Cron task for the note.
-   *
-   * @author Leemoonsoo
-   *
-   */
-  public static class CronJob implements org.quartz.Job {
-    public static Notebook notebook;
-
-    @Override
-    public void execute(JobExecutionContext context) throws JobExecutionException {
-
-      String noteId = context.getJobDetail().getJobDataMap().getString("noteId");
-      Note note = notebook.getNote(noteId);
-      note.runAll();
-    }
-  }
-
-  public void refreshCron(String id) {
-    removeCron(id);
-    synchronized (notes) {
-
-      Note note = notes.get(id);
-      if (note == null) {
-        return;
-      }
-      Map<String, Object> config = note.getConfig();
-      if (config == null) {
-        return;
-      }
-
-      String cronExpr = (String) note.getConfig().get("cron");
-      if (cronExpr == null || cronExpr.trim().length() == 0) {
-        return;
-      }
-
-
-      JobDetail newJob =
-          JobBuilder.newJob(CronJob.class).withIdentity(id, "note").usingJobData("noteId", id)
-          .build();
-
-      Map<String, Object> info = note.getInfo();
-      info.put("cron", null);
-
-      CronTrigger trigger = null;
-      try {
-        trigger =
-            TriggerBuilder.newTrigger().withIdentity("trigger_" + id, "note")
-            .withSchedule(CronScheduleBuilder.cronSchedule(cronExpr)).forJob(id, "note")
-            .build();
-      } catch (Exception e) {
-        logger.error("Error", e);
-        info.put("cron", e.getMessage());
-      }
-
-
-      try {
-        if (trigger != null) {
-          quartzSched.scheduleJob(newJob, trigger);
-        }
-      } catch (SchedulerException e) {
-        logger.error("Error", e);
-        info.put("cron", "Scheduler Exception");
-      }
-    }
-  }
-
-  private void removeCron(String id) {
-    try {
-      quartzSched.deleteJob(new JobKey(id, "note"));
-    } catch (SchedulerException e) {
-      logger.error("Can't remove quartz job " + id, e);
-    }
-  }
-
-  public InterpreterFactory getInterpreterFactory() {
-    return replFactory;
-  }
-
-
-}
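
refreshCron() above maps a note's "cron" config entry onto a Quartz job: a JobDetail keyed by the note id carries that id in its JobDataMap, and a CronTrigger built from the expression fires it. The standalone sketch below shows the same wiring; HelloJob and the expression are placeholders, whereas Zeppelin's CronJob looks the note up and calls runAll().

    import org.quartz.CronScheduleBuilder;
    import org.quartz.Job;
    import org.quartz.JobBuilder;
    import org.quartz.JobDetail;
    import org.quartz.JobExecutionContext;
    import org.quartz.Scheduler;
    import org.quartz.Trigger;
    import org.quartz.TriggerBuilder;
    import org.quartz.impl.StdSchedulerFactory;

    public class CronDemo {
      // Placeholder job; Zeppelin's CronJob runs all paragraphs of the note instead.
      public static class HelloJob implements Job {
        @Override
        public void execute(JobExecutionContext context) {
          System.out.println("noteId=" + context.getJobDetail().getJobDataMap().getString("noteId"));
        }
      }

      public static void main(String[] args) throws Exception {
        Scheduler sched = new StdSchedulerFactory().getScheduler();
        sched.start();

        JobDetail job = JobBuilder.newJob(HelloJob.class)
            .withIdentity("note-1", "note")
            .usingJobData("noteId", "note-1")
            .build();

        // Fire at the start of every minute.
        Trigger trigger = TriggerBuilder.newTrigger()
            .withIdentity("trigger_note-1", "note")
            .withSchedule(CronScheduleBuilder.cronSchedule("0 * * * * ?"))
            .build();

        sched.scheduleJob(job, trigger);
      }
    }

An invalid expression makes cronSchedule() throw, which refreshCron() catches and records under the note's "cron" info key instead of scheduling anything.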

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Paragraph.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Paragraph.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Paragraph.java
deleted file mode 100644
index aabd5de..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/Paragraph.java
+++ /dev/null
@@ -1,221 +0,0 @@
-package com.nflabs.zeppelin.notebook;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.display.Input;
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.Interpreter.FormType;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.scheduler.Job;
-import com.nflabs.zeppelin.scheduler.JobListener;
-
-/**
- * Paragraph is a representation of an execution unit.
- *
- * @author Leemoonsoo
- */
-public class Paragraph extends Job implements Serializable {
-  private static final transient long serialVersionUID = -6328572073497992016L;
-  private transient NoteInterpreterLoader replLoader;
-
-  String title;
-  String text;
-  private Map<String, Object> config; // paragraph configs like isOpen, colWidth, etc
-  public final GUI settings;          // form and parameter settings
-
-  public Paragraph(JobListener listener, NoteInterpreterLoader replLoader) {
-    super(generateId(), listener);
-    this.replLoader = replLoader;
-    title = null;
-    text = null;
-    settings = new GUI();
-    config = new HashMap<String, Object>();
-  }
-
-  private static String generateId() {
-    return "paragraph_" + System.currentTimeMillis() + "_"
-           + new Random(System.currentTimeMillis()).nextInt();
-  }
-
-  public String getText() {
-    return text;
-  }
-
-  public void setText(String newText) {
-    this.text = newText;
-  }
-
-
-  public String getTitle() {
-    return title;
-  }
-
-  public void setTitle(String title) {
-    this.title = title;
-  }
-
-  public String getRequiredReplName() {
-    return getRequiredReplName(text);
-  }
-
-  public static String getRequiredReplName(String text) {
-    if (text == null) {
-      return null;
-    }
-
-    // get script head
-    int scriptHeadIndex = 0;
-    for (int i = 0; i < text.length(); i++) {
-      char ch = text.charAt(i);
-      if (ch == ' ' || ch == '\n') {
-        scriptHeadIndex = i;
-        break;
-      }
-    }
-    if (scriptHeadIndex == 0) {
-      return null;
-    }
-    String head = text.substring(0, scriptHeadIndex);
-    if (head.startsWith("%")) {
-      return head.substring(1);
-    } else {
-      return null;
-    }
-  }
-
-  private String getScriptBody() {
-    return getScriptBody(text);
-  }
-
-  public static String getScriptBody(String text) {
-    if (text == null) {
-      return null;
-    }
-
-    String magic = getRequiredReplName(text);
-    if (magic == null) {
-      return text;
-    }
-    if (magic.length() + 2 >= text.length()) {
-      return "";
-    }
-    return text.substring(magic.length() + 2);
-  }
-
-  public NoteInterpreterLoader getNoteReplLoader() {
-    return replLoader;
-  }
-
-  public Interpreter getRepl(String name) {
-    return replLoader.get(name);
-  }
-
-  public List<String> completion(String buffer, int cursor) {
-    String replName = getRequiredReplName(buffer);
-    if (replName != null) {
-      cursor -= replName.length() + 1;
-    }
-    String body = getScriptBody(buffer);
-    Interpreter repl = getRepl(replName);
-    if (repl == null) {
-      return null;
-    }
-
-    return repl.completion(body, cursor);
-  }
-
-  public void setNoteReplLoader(NoteInterpreterLoader repls) {
-    this.replLoader = repls;
-  }
-
-  public InterpreterResult getResult() {
-    return (InterpreterResult) getReturn();
-  }
-
-  @Override
-  public int progress() {
-    String replName = getRequiredReplName();
-    Interpreter repl = getRepl(replName);
-    if (repl != null) {
-      return repl.getProgress(getInterpreterContext());
-    } else {
-      return 0;
-    }
-  }
-
-  @Override
-  public Map<String, Object> info() {
-    return null;
-  }
-
-  @Override
-  protected Object jobRun() throws Throwable {
-    String replName = getRequiredReplName();
-    Interpreter repl = getRepl(replName);
-    logger().info("run paragraph {} using {}", getId(), replName);
-    if (repl == null) {
-      logger().error("Cannot find interpreter named " + replName);
-      throw new RuntimeException("Can not find interpreter for " + getRequiredReplName());
-    }
-
-    String script = getScriptBody();
-    // inject form
-    if (repl.getFormType() == FormType.NATIVE) {
-      settings.clear();
-    } else if (repl.getFormType() == FormType.SIMPLE) {
-      String scriptBody = getScriptBody();
-      Map<String, Input> inputs = Input.extractSimpleQueryParam(scriptBody); // inputs will be built
-                                                                             // from script body
-      settings.setForms(inputs);
-      script = Input.getSimpleQuery(settings.getParams(), scriptBody);
-    }
-    logger().info("RUN : " + script);
-    InterpreterResult ret = repl.interpret(script, getInterpreterContext());
-    return ret;
-  }
-
-  @Override
-  protected boolean jobAbort() {
-    Interpreter repl = getRepl(getRequiredReplName());
-    repl.cancel(getInterpreterContext());
-    return true;
-  }
-
-  private InterpreterContext getInterpreterContext() {
-    InterpreterContext interpreterContext = new InterpreterContext(getId(),
-            this.getTitle(),
-            this.getText(),
-            this.getConfig(),
-            this.settings);
-    return interpreterContext;
-  }
-
-  private Logger logger() {
-    Logger logger = LoggerFactory.getLogger(Paragraph.class);
-    return logger;
-  }
-
-
-  public Map<String, Object> getConfig() {
-    return config;
-  }
-
-  public void setConfig(Map<String, Object> config) {
-    this.config = config;
-  }
-
-  public void setReturn(InterpreterResult value, Throwable t) {
-    setResult(value);
-    setException(t);
-
-  }
-}
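
getRequiredReplName() and getScriptBody() above split a paragraph on its leading "%name" magic: the token up to the first space or newline selects the interpreter, and the remainder is the script that gets run. The helper below mirrors that split for illustration only; it is not the Paragraph class itself.

    public class MagicSplitDemo {
      // Roughly mirrors Paragraph.getRequiredReplName(): the name after '%', or null.
      static String replName(String text) {
        if (text == null || !text.startsWith("%")) {
          return null;
        }
        int end = text.indexOf(' ');
        int nl = text.indexOf('\n');
        if (nl >= 0 && (end < 0 || nl < end)) {
          end = nl;
        }
        return end > 0 ? text.substring(1, end) : null;
      }

      // Roughly mirrors Paragraph.getScriptBody(): strips "%name" plus its separator.
      static String scriptBody(String text) {
        String name = replName(text);
        return name == null ? text : text.substring(name.length() + 2);
      }

      public static void main(String[] args) {
        String paragraph = "%md **hello**";
        System.out.println(replName(paragraph));   // md
        System.out.println(scriptBody(paragraph)); // **hello**
      }
    }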

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/utility/IdHashes.java
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/utility/IdHashes.java b/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/utility/IdHashes.java
deleted file mode 100644
index 65229bb..0000000
--- a/zeppelin-zengine/src/main/java/com/nflabs/zeppelin/notebook/utility/IdHashes.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.nflabs.zeppelin.notebook.utility;
-
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Generate Tiny ID.
- * 
- * @author anthonycorbacho
- *
- */
-public class IdHashes {
-  public static final char[] DICTIONARY = new char[] {'1', '2', '3', '4', '5', '6', '7', '8', '9',
-    'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U',
-    'V', 'W', 'X', 'Y', 'Z'};
-
-  /**
-   * encodes the given string into the base of the dictionary provided in the constructor.
-   * 
-   * @param value the number to encode.
-   * @return the encoded string.
-   */
-  public static String encode(Long value) {
-
-    List<Character> result = new ArrayList<Character>();
-    BigInteger base = new BigInteger("" + DICTIONARY.length);
-    int exponent = 1;
-    BigInteger remaining = new BigInteger(value.toString());
-    while (true) {
-      BigInteger a = base.pow(exponent); // 16^1 = 16
-      BigInteger b = remaining.mod(a); // 119 % 16 = 7 | 112 % 256 = 112
-      BigInteger c = base.pow(exponent - 1);
-      BigInteger d = b.divide(c);
-
-      // if d > dictionary.length, we have a problem. but BigInteger doesnt have
-      // a greater than method :-( hope for the best. theoretically, d is always
-      // an index of the dictionary!
-      result.add(DICTIONARY[d.intValue()]);
-      remaining = remaining.subtract(b); // 119 - 7 = 112 | 112 - 112 = 0
-
-      // finished?
-      if (remaining.equals(BigInteger.ZERO)) {
-        break;
-      }
-
-      exponent++;
-    }
-
-    // need to reverse it, since the start of the list contains the least significant values
-    StringBuffer sb = new StringBuffer();
-    for (int i = result.size() - 1; i >= 0; i--) {
-      sb.append(result.get(i));
-    }
-    return sb.toString();
-  }
-}
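
IdHashes.encode() above is a change-of-base conversion: the value is rewritten in base 32 over a dictionary of digits and uppercase letters that skips easily-confused characters (0, I, L and O). For example, 119 = 3 * 32 + 23, so its base-32 digits are [3, 23] and the encoded id is "4R". The sketch below is an equivalent formulation for non-negative values using plain long arithmetic; it is an illustration, not the class itself.

    public class TinyIdDemo {
      static final char[] DICTIONARY = {'1', '2', '3', '4', '5', '6', '7', '8', '9',
          'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U',
          'V', 'W', 'X', 'Y', 'Z'};

      // Standard change-of-base loop: emit the least significant digit first, then reverse.
      static String encode(long value) {
        StringBuilder sb = new StringBuilder();
        do {
          sb.append(DICTIONARY[(int) (value % DICTIONARY.length)]);
          value /= DICTIONARY.length;
        } while (value > 0);
        return sb.reverse().toString();
      }

      public static void main(String[] args) {
        System.out.println(encode(119L)); // 4R
      }
    }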


[11/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterService.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterService.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterService.java
deleted file mode 100644
index eed35c4..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterService.java
+++ /dev/null
@@ -1,8174 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.9.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- *  @generated
- */
-package com.nflabs.zeppelin.interpreter.thrift;
-
-import org.apache.thrift.scheme.IScheme;
-import org.apache.thrift.scheme.SchemeFactory;
-import org.apache.thrift.scheme.StandardScheme;
-
-import org.apache.thrift.scheme.TupleScheme;
-import org.apache.thrift.protocol.TTupleProtocol;
-import org.apache.thrift.protocol.TProtocolException;
-import org.apache.thrift.EncodingUtils;
-import org.apache.thrift.TException;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class RemoteInterpreterService {
-
-  public interface Iface {
-
-    public void createInterpreter(String className, Map<String,String> properties) throws org.apache.thrift.TException;
-
-    public void open(String className) throws org.apache.thrift.TException;
-
-    public void close(String className) throws org.apache.thrift.TException;
-
-    public RemoteInterpreterResult interpret(String className, String st, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException;
-
-    public void cancel(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException;
-
-    public int getProgress(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException;
-
-    public String getFormType(String className) throws org.apache.thrift.TException;
-
-    public List<String> completion(String className, String buf, int cursor) throws org.apache.thrift.TException;
-
-    public void shutdown() throws org.apache.thrift.TException;
-
-    public String getStatus(String jobId) throws org.apache.thrift.TException;
-
-  }
-
-  public interface AsyncIface {
-
-    public void createInterpreter(String className, Map<String,String> properties, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.createInterpreter_call> resultHandler) throws org.apache.thrift.TException;
-
-    public void open(String className, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.open_call> resultHandler) throws org.apache.thrift.TException;
-
-    public void close(String className, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.close_call> resultHandler) throws org.apache.thrift.TException;
-
-    public void interpret(String className, String st, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.interpret_call> resultHandler) throws org.apache.thrift.TException;
-
-    public void cancel(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.cancel_call> resultHandler) throws org.apache.thrift.TException;
-
-    public void getProgress(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getProgress_call> resultHandler) throws org.apache.thrift.TException;
-
-    public void getFormType(String className, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getFormType_call> resultHandler) throws org.apache.thrift.TException;
-
-    public void completion(String className, String buf, int cursor, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.completion_call> resultHandler) throws org.apache.thrift.TException;
-
-    public void shutdown(org.apache.thrift.async.AsyncMethodCallback<AsyncClient.shutdown_call> resultHandler) throws org.apache.thrift.TException;
-
-    public void getStatus(String jobId, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getStatus_call> resultHandler) throws org.apache.thrift.TException;
-
-  }
-
-  public static class Client extends org.apache.thrift.TServiceClient implements Iface {
-    public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
-      public Factory() {}
-      public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
-        return new Client(prot);
-      }
-      public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
-        return new Client(iprot, oprot);
-      }
-    }
-
-    public Client(org.apache.thrift.protocol.TProtocol prot)
-    {
-      super(prot, prot);
-    }
-
-    public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
-      super(iprot, oprot);
-    }
-
-    public void createInterpreter(String className, Map<String,String> properties) throws org.apache.thrift.TException
-    {
-      send_createInterpreter(className, properties);
-      recv_createInterpreter();
-    }
-
-    public void send_createInterpreter(String className, Map<String,String> properties) throws org.apache.thrift.TException
-    {
-      createInterpreter_args args = new createInterpreter_args();
-      args.setClassName(className);
-      args.setProperties(properties);
-      sendBase("createInterpreter", args);
-    }
-
-    public void recv_createInterpreter() throws org.apache.thrift.TException
-    {
-      createInterpreter_result result = new createInterpreter_result();
-      receiveBase(result, "createInterpreter");
-      return;
-    }
-
-    public void open(String className) throws org.apache.thrift.TException
-    {
-      send_open(className);
-      recv_open();
-    }
-
-    public void send_open(String className) throws org.apache.thrift.TException
-    {
-      open_args args = new open_args();
-      args.setClassName(className);
-      sendBase("open", args);
-    }
-
-    public void recv_open() throws org.apache.thrift.TException
-    {
-      open_result result = new open_result();
-      receiveBase(result, "open");
-      return;
-    }
-
-    public void close(String className) throws org.apache.thrift.TException
-    {
-      send_close(className);
-      recv_close();
-    }
-
-    public void send_close(String className) throws org.apache.thrift.TException
-    {
-      close_args args = new close_args();
-      args.setClassName(className);
-      sendBase("close", args);
-    }
-
-    public void recv_close() throws org.apache.thrift.TException
-    {
-      close_result result = new close_result();
-      receiveBase(result, "close");
-      return;
-    }
-
-    public RemoteInterpreterResult interpret(String className, String st, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
-    {
-      send_interpret(className, st, interpreterContext);
-      return recv_interpret();
-    }
-
-    public void send_interpret(String className, String st, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
-    {
-      interpret_args args = new interpret_args();
-      args.setClassName(className);
-      args.setSt(st);
-      args.setInterpreterContext(interpreterContext);
-      sendBase("interpret", args);
-    }
-
-    public RemoteInterpreterResult recv_interpret() throws org.apache.thrift.TException
-    {
-      interpret_result result = new interpret_result();
-      receiveBase(result, "interpret");
-      if (result.isSetSuccess()) {
-        return result.success;
-      }
-      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "interpret failed: unknown result");
-    }
-
-    public void cancel(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
-    {
-      send_cancel(className, interpreterContext);
-      recv_cancel();
-    }
-
-    public void send_cancel(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
-    {
-      cancel_args args = new cancel_args();
-      args.setClassName(className);
-      args.setInterpreterContext(interpreterContext);
-      sendBase("cancel", args);
-    }
-
-    public void recv_cancel() throws org.apache.thrift.TException
-    {
-      cancel_result result = new cancel_result();
-      receiveBase(result, "cancel");
-      return;
-    }
-
-    public int getProgress(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
-    {
-      send_getProgress(className, interpreterContext);
-      return recv_getProgress();
-    }
-
-    public void send_getProgress(String className, RemoteInterpreterContext interpreterContext) throws org.apache.thrift.TException
-    {
-      getProgress_args args = new getProgress_args();
-      args.setClassName(className);
-      args.setInterpreterContext(interpreterContext);
-      sendBase("getProgress", args);
-    }
-
-    public int recv_getProgress() throws org.apache.thrift.TException
-    {
-      getProgress_result result = new getProgress_result();
-      receiveBase(result, "getProgress");
-      if (result.isSetSuccess()) {
-        return result.success;
-      }
-      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getProgress failed: unknown result");
-    }
-
-    public String getFormType(String className) throws org.apache.thrift.TException
-    {
-      send_getFormType(className);
-      return recv_getFormType();
-    }
-
-    public void send_getFormType(String className) throws org.apache.thrift.TException
-    {
-      getFormType_args args = new getFormType_args();
-      args.setClassName(className);
-      sendBase("getFormType", args);
-    }
-
-    public String recv_getFormType() throws org.apache.thrift.TException
-    {
-      getFormType_result result = new getFormType_result();
-      receiveBase(result, "getFormType");
-      if (result.isSetSuccess()) {
-        return result.success;
-      }
-      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getFormType failed: unknown result");
-    }
-
-    public List<String> completion(String className, String buf, int cursor) throws org.apache.thrift.TException
-    {
-      send_completion(className, buf, cursor);
-      return recv_completion();
-    }
-
-    public void send_completion(String className, String buf, int cursor) throws org.apache.thrift.TException
-    {
-      completion_args args = new completion_args();
-      args.setClassName(className);
-      args.setBuf(buf);
-      args.setCursor(cursor);
-      sendBase("completion", args);
-    }
-
-    public List<String> recv_completion() throws org.apache.thrift.TException
-    {
-      completion_result result = new completion_result();
-      receiveBase(result, "completion");
-      if (result.isSetSuccess()) {
-        return result.success;
-      }
-      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "completion failed: unknown result");
-    }
-
-    public void shutdown() throws org.apache.thrift.TException
-    {
-      send_shutdown();
-      recv_shutdown();
-    }
-
-    public void send_shutdown() throws org.apache.thrift.TException
-    {
-      shutdown_args args = new shutdown_args();
-      sendBase("shutdown", args);
-    }
-
-    public void recv_shutdown() throws org.apache.thrift.TException
-    {
-      shutdown_result result = new shutdown_result();
-      receiveBase(result, "shutdown");
-      return;
-    }
-
-    public String getStatus(String jobId) throws org.apache.thrift.TException
-    {
-      send_getStatus(jobId);
-      return recv_getStatus();
-    }
-
-    public void send_getStatus(String jobId) throws org.apache.thrift.TException
-    {
-      getStatus_args args = new getStatus_args();
-      args.setJobId(jobId);
-      sendBase("getStatus", args);
-    }
-
-    public String recv_getStatus() throws org.apache.thrift.TException
-    {
-      getStatus_result result = new getStatus_result();
-      receiveBase(result, "getStatus");
-      if (result.isSetSuccess()) {
-        return result.success;
-      }
-      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getStatus failed: unknown result");
-    }
-
-  }
-  public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
-    public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
-      private org.apache.thrift.async.TAsyncClientManager clientManager;
-      private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
-      public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
-        this.clientManager = clientManager;
-        this.protocolFactory = protocolFactory;
-      }
-      public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
-        return new AsyncClient(protocolFactory, clientManager, transport);
-      }
-    }
-
-    public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
-      super(protocolFactory, clientManager, transport);
-    }
-
-    public void createInterpreter(String className, Map<String,String> properties, org.apache.thrift.async.AsyncMethodCallback<createInterpreter_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      createInterpreter_call method_call = new createInterpreter_call(className, properties, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class createInterpreter_call extends org.apache.thrift.async.TAsyncMethodCall {
-      private String className;
-      private Map<String,String> properties;
-      public createInterpreter_call(String className, Map<String,String> properties, org.apache.thrift.async.AsyncMethodCallback<createInterpreter_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.className = className;
-        this.properties = properties;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("createInterpreter", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        createInterpreter_args args = new createInterpreter_args();
-        args.setClassName(className);
-        args.setProperties(properties);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public void getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        (new Client(prot)).recv_createInterpreter();
-      }
-    }
-
-    public void open(String className, org.apache.thrift.async.AsyncMethodCallback<open_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      open_call method_call = new open_call(className, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class open_call extends org.apache.thrift.async.TAsyncMethodCall {
-      private String className;
-      public open_call(String className, org.apache.thrift.async.AsyncMethodCallback<open_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.className = className;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("open", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        open_args args = new open_args();
-        args.setClassName(className);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public void getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        (new Client(prot)).recv_open();
-      }
-    }
-
-    public void close(String className, org.apache.thrift.async.AsyncMethodCallback<close_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      close_call method_call = new close_call(className, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class close_call extends org.apache.thrift.async.TAsyncMethodCall {
-      private String className;
-      public close_call(String className, org.apache.thrift.async.AsyncMethodCallback<close_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.className = className;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("close", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        close_args args = new close_args();
-        args.setClassName(className);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public void getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        (new Client(prot)).recv_close();
-      }
-    }
-
-    public void interpret(String className, String st, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<interpret_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      interpret_call method_call = new interpret_call(className, st, interpreterContext, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class interpret_call extends org.apache.thrift.async.TAsyncMethodCall {
-      private String className;
-      private String st;
-      private RemoteInterpreterContext interpreterContext;
-      public interpret_call(String className, String st, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<interpret_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.className = className;
-        this.st = st;
-        this.interpreterContext = interpreterContext;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("interpret", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        interpret_args args = new interpret_args();
-        args.setClassName(className);
-        args.setSt(st);
-        args.setInterpreterContext(interpreterContext);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public RemoteInterpreterResult getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        return (new Client(prot)).recv_interpret();
-      }
-    }
-
-    public void cancel(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<cancel_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      cancel_call method_call = new cancel_call(className, interpreterContext, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class cancel_call extends org.apache.thrift.async.TAsyncMethodCall {
-      private String className;
-      private RemoteInterpreterContext interpreterContext;
-      public cancel_call(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<cancel_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.className = className;
-        this.interpreterContext = interpreterContext;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("cancel", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        cancel_args args = new cancel_args();
-        args.setClassName(className);
-        args.setInterpreterContext(interpreterContext);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public void getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        (new Client(prot)).recv_cancel();
-      }
-    }
-
-    public void getProgress(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<getProgress_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      getProgress_call method_call = new getProgress_call(className, interpreterContext, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class getProgress_call extends org.apache.thrift.async.TAsyncMethodCall {
-      private String className;
-      private RemoteInterpreterContext interpreterContext;
-      public getProgress_call(String className, RemoteInterpreterContext interpreterContext, org.apache.thrift.async.AsyncMethodCallback<getProgress_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.className = className;
-        this.interpreterContext = interpreterContext;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getProgress", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        getProgress_args args = new getProgress_args();
-        args.setClassName(className);
-        args.setInterpreterContext(interpreterContext);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public int getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        return (new Client(prot)).recv_getProgress();
-      }
-    }
-
-    public void getFormType(String className, org.apache.thrift.async.AsyncMethodCallback<getFormType_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      getFormType_call method_call = new getFormType_call(className, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class getFormType_call extends org.apache.thrift.async.TAsyncMethodCall {
-      private String className;
-      public getFormType_call(String className, org.apache.thrift.async.AsyncMethodCallback<getFormType_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.className = className;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getFormType", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        getFormType_args args = new getFormType_args();
-        args.setClassName(className);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public String getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        return (new Client(prot)).recv_getFormType();
-      }
-    }
-
-    public void completion(String className, String buf, int cursor, org.apache.thrift.async.AsyncMethodCallback<completion_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      completion_call method_call = new completion_call(className, buf, cursor, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class completion_call extends org.apache.thrift.async.TAsyncMethodCall {
-      private String className;
-      private String buf;
-      private int cursor;
-      public completion_call(String className, String buf, int cursor, org.apache.thrift.async.AsyncMethodCallback<completion_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.className = className;
-        this.buf = buf;
-        this.cursor = cursor;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("completion", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        completion_args args = new completion_args();
-        args.setClassName(className);
-        args.setBuf(buf);
-        args.setCursor(cursor);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public List<String> getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        return (new Client(prot)).recv_completion();
-      }
-    }
-
-    public void shutdown(org.apache.thrift.async.AsyncMethodCallback<shutdown_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      shutdown_call method_call = new shutdown_call(resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class shutdown_call extends org.apache.thrift.async.TAsyncMethodCall {
-      public shutdown_call(org.apache.thrift.async.AsyncMethodCallback<shutdown_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("shutdown", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        shutdown_args args = new shutdown_args();
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public void getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        (new Client(prot)).recv_shutdown();
-      }
-    }
-
-    public void getStatus(String jobId, org.apache.thrift.async.AsyncMethodCallback<getStatus_call> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      getStatus_call method_call = new getStatus_call(jobId, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class getStatus_call extends org.apache.thrift.async.TAsyncMethodCall {
-      private String jobId;
-      public getStatus_call(String jobId, org.apache.thrift.async.AsyncMethodCallback<getStatus_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.jobId = jobId;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getStatus", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        getStatus_args args = new getStatus_args();
-        args.setJobId(jobId);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public String getResult() throws org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        return (new Client(prot)).recv_getStatus();
-      }
-    }
-
-  }
-
-  public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
-    private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
-    public Processor(I iface) {
-      super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
-    }
-
-    protected Processor(I iface, Map<String,  org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> processMap) {
-      super(iface, getProcessMap(processMap));
-    }
-
-    private static <I extends Iface> Map<String,  org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> getProcessMap(Map<String,  org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> processMap) {
-      processMap.put("createInterpreter", new createInterpreter());
-      processMap.put("open", new open());
-      processMap.put("close", new close());
-      processMap.put("interpret", new interpret());
-      processMap.put("cancel", new cancel());
-      processMap.put("getProgress", new getProgress());
-      processMap.put("getFormType", new getFormType());
-      processMap.put("completion", new completion());
-      processMap.put("shutdown", new shutdown());
-      processMap.put("getStatus", new getStatus());
-      return processMap;
-    }
-
-    public static class createInterpreter<I extends Iface> extends org.apache.thrift.ProcessFunction<I, createInterpreter_args> {
-      public createInterpreter() {
-        super("createInterpreter");
-      }
-
-      public createInterpreter_args getEmptyArgsInstance() {
-        return new createInterpreter_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public createInterpreter_result getResult(I iface, createInterpreter_args args) throws org.apache.thrift.TException {
-        createInterpreter_result result = new createInterpreter_result();
-        iface.createInterpreter(args.className, args.properties);
-        return result;
-      }
-    }
-
-    public static class open<I extends Iface> extends org.apache.thrift.ProcessFunction<I, open_args> {
-      public open() {
-        super("open");
-      }
-
-      public open_args getEmptyArgsInstance() {
-        return new open_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public open_result getResult(I iface, open_args args) throws org.apache.thrift.TException {
-        open_result result = new open_result();
-        iface.open(args.className);
-        return result;
-      }
-    }
-
-    public static class close<I extends Iface> extends org.apache.thrift.ProcessFunction<I, close_args> {
-      public close() {
-        super("close");
-      }
-
-      public close_args getEmptyArgsInstance() {
-        return new close_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public close_result getResult(I iface, close_args args) throws org.apache.thrift.TException {
-        close_result result = new close_result();
-        iface.close(args.className);
-        return result;
-      }
-    }
-
-    public static class interpret<I extends Iface> extends org.apache.thrift.ProcessFunction<I, interpret_args> {
-      public interpret() {
-        super("interpret");
-      }
-
-      public interpret_args getEmptyArgsInstance() {
-        return new interpret_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public interpret_result getResult(I iface, interpret_args args) throws org.apache.thrift.TException {
-        interpret_result result = new interpret_result();
-        result.success = iface.interpret(args.className, args.st, args.interpreterContext);
-        return result;
-      }
-    }
-
-    public static class cancel<I extends Iface> extends org.apache.thrift.ProcessFunction<I, cancel_args> {
-      public cancel() {
-        super("cancel");
-      }
-
-      public cancel_args getEmptyArgsInstance() {
-        return new cancel_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public cancel_result getResult(I iface, cancel_args args) throws org.apache.thrift.TException {
-        cancel_result result = new cancel_result();
-        iface.cancel(args.className, args.interpreterContext);
-        return result;
-      }
-    }
-
-    public static class getProgress<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getProgress_args> {
-      public getProgress() {
-        super("getProgress");
-      }
-
-      public getProgress_args getEmptyArgsInstance() {
-        return new getProgress_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public getProgress_result getResult(I iface, getProgress_args args) throws org.apache.thrift.TException {
-        getProgress_result result = new getProgress_result();
-        result.success = iface.getProgress(args.className, args.interpreterContext);
-        result.setSuccessIsSet(true);
-        return result;
-      }
-    }
-
-    public static class getFormType<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getFormType_args> {
-      public getFormType() {
-        super("getFormType");
-      }
-
-      public getFormType_args getEmptyArgsInstance() {
-        return new getFormType_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public getFormType_result getResult(I iface, getFormType_args args) throws org.apache.thrift.TException {
-        getFormType_result result = new getFormType_result();
-        result.success = iface.getFormType(args.className);
-        return result;
-      }
-    }
-
-    public static class completion<I extends Iface> extends org.apache.thrift.ProcessFunction<I, completion_args> {
-      public completion() {
-        super("completion");
-      }
-
-      public completion_args getEmptyArgsInstance() {
-        return new completion_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public completion_result getResult(I iface, completion_args args) throws org.apache.thrift.TException {
-        completion_result result = new completion_result();
-        result.success = iface.completion(args.className, args.buf, args.cursor);
-        return result;
-      }
-    }
-
-    public static class shutdown<I extends Iface> extends org.apache.thrift.ProcessFunction<I, shutdown_args> {
-      public shutdown() {
-        super("shutdown");
-      }
-
-      public shutdown_args getEmptyArgsInstance() {
-        return new shutdown_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public shutdown_result getResult(I iface, shutdown_args args) throws org.apache.thrift.TException {
-        shutdown_result result = new shutdown_result();
-        iface.shutdown();
-        return result;
-      }
-    }
-
-    public static class getStatus<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getStatus_args> {
-      public getStatus() {
-        super("getStatus");
-      }
-
-      public getStatus_args getEmptyArgsInstance() {
-        return new getStatus_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      public getStatus_result getResult(I iface, getStatus_args args) throws org.apache.thrift.TException {
-        getStatus_result result = new getStatus_result();
-        result.success = iface.getStatus(args.jobId);
-        return result;
-      }
-    }
-
-  }
-
-  public static class createInterpreter_args implements org.apache.thrift.TBase<createInterpreter_args, createInterpreter_args._Fields>, java.io.Serializable, Cloneable   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("createInterpreter_args");
-
-    private static final org.apache.thrift.protocol.TField CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("className", org.apache.thrift.protocol.TType.STRING, (short)1);
-    private static final org.apache.thrift.protocol.TField PROPERTIES_FIELD_DESC = new org.apache.thrift.protocol.TField("properties", org.apache.thrift.protocol.TType.MAP, (short)2);
-
-    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
-    static {
-      schemes.put(StandardScheme.class, new createInterpreter_argsStandardSchemeFactory());
-      schemes.put(TupleScheme.class, new createInterpreter_argsTupleSchemeFactory());
-    }
-
-    public String className; // required
-    public Map<String,String> properties; // required
-
-    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-      CLASS_NAME((short)1, "className"),
-      PROPERTIES((short)2, "properties");
-
-      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
-      static {
-        for (_Fields field : EnumSet.allOf(_Fields.class)) {
-          byName.put(field.getFieldName(), field);
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, or null if its not found.
-       */
-      public static _Fields findByThriftId(int fieldId) {
-        switch(fieldId) {
-          case 1: // CLASS_NAME
-            return CLASS_NAME;
-          case 2: // PROPERTIES
-            return PROPERTIES;
-          default:
-            return null;
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, throwing an exception
-       * if it is not found.
-       */
-      public static _Fields findByThriftIdOrThrow(int fieldId) {
-        _Fields fields = findByThriftId(fieldId);
-        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-        return fields;
-      }
-
-      /**
-       * Find the _Fields constant that matches name, or null if its not found.
-       */
-      public static _Fields findByName(String name) {
-        return byName.get(name);
-      }
-
-      private final short _thriftId;
-      private final String _fieldName;
-
-      _Fields(short thriftId, String fieldName) {
-        _thriftId = thriftId;
-        _fieldName = fieldName;
-      }
-
-      public short getThriftFieldId() {
-        return _thriftId;
-      }
-
-      public String getFieldName() {
-        return _fieldName;
-      }
-    }
-
-    // isset id assignments
-    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-    static {
-      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      tmpMap.put(_Fields.CLASS_NAME, new org.apache.thrift.meta_data.FieldMetaData("className", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-      tmpMap.put(_Fields.PROPERTIES, new org.apache.thrift.meta_data.FieldMetaData("properties", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
-              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), 
-              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
-      metaDataMap = Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(createInterpreter_args.class, metaDataMap);
-    }
-
-    public createInterpreter_args() {
-    }
-
-    public createInterpreter_args(
-      String className,
-      Map<String,String> properties)
-    {
-      this();
-      this.className = className;
-      this.properties = properties;
-    }
-
-    /**
-     * Performs a deep copy on <i>other</i>.
-     */
-    public createInterpreter_args(createInterpreter_args other) {
-      if (other.isSetClassName()) {
-        this.className = other.className;
-      }
-      if (other.isSetProperties()) {
-        Map<String,String> __this__properties = new HashMap<String,String>();
-        for (Map.Entry<String, String> other_element : other.properties.entrySet()) {
-
-          String other_element_key = other_element.getKey();
-          String other_element_value = other_element.getValue();
-
-          String __this__properties_copy_key = other_element_key;
-
-          String __this__properties_copy_value = other_element_value;
-
-          __this__properties.put(__this__properties_copy_key, __this__properties_copy_value);
-        }
-        this.properties = __this__properties;
-      }
-    }
-
-    public createInterpreter_args deepCopy() {
-      return new createInterpreter_args(this);
-    }
-
-    @Override
-    public void clear() {
-      this.className = null;
-      this.properties = null;
-    }
-
-    public String getClassName() {
-      return this.className;
-    }
-
-    public createInterpreter_args setClassName(String className) {
-      this.className = className;
-      return this;
-    }
-
-    public void unsetClassName() {
-      this.className = null;
-    }
-
-    /** Returns true if field className is set (has been assigned a value) and false otherwise */
-    public boolean isSetClassName() {
-      return this.className != null;
-    }
-
-    public void setClassNameIsSet(boolean value) {
-      if (!value) {
-        this.className = null;
-      }
-    }
-
-    public int getPropertiesSize() {
-      return (this.properties == null) ? 0 : this.properties.size();
-    }
-
-    public void putToProperties(String key, String val) {
-      if (this.properties == null) {
-        this.properties = new HashMap<String,String>();
-      }
-      this.properties.put(key, val);
-    }
-
-    public Map<String,String> getProperties() {
-      return this.properties;
-    }
-
-    public createInterpreter_args setProperties(Map<String,String> properties) {
-      this.properties = properties;
-      return this;
-    }
-
-    public void unsetProperties() {
-      this.properties = null;
-    }
-
-    /** Returns true if field properties is set (has been assigned a value) and false otherwise */
-    public boolean isSetProperties() {
-      return this.properties != null;
-    }
-
-    public void setPropertiesIsSet(boolean value) {
-      if (!value) {
-        this.properties = null;
-      }
-    }
-
-    public void setFieldValue(_Fields field, Object value) {
-      switch (field) {
-      case CLASS_NAME:
-        if (value == null) {
-          unsetClassName();
-        } else {
-          setClassName((String)value);
-        }
-        break;
-
-      case PROPERTIES:
-        if (value == null) {
-          unsetProperties();
-        } else {
-          setProperties((Map<String,String>)value);
-        }
-        break;
-
-      }
-    }
-
-    public Object getFieldValue(_Fields field) {
-      switch (field) {
-      case CLASS_NAME:
-        return getClassName();
-
-      case PROPERTIES:
-        return getProperties();
-
-      }
-      throw new IllegalStateException();
-    }
-
-    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-    public boolean isSet(_Fields field) {
-      if (field == null) {
-        throw new IllegalArgumentException();
-      }
-
-      switch (field) {
-      case CLASS_NAME:
-        return isSetClassName();
-      case PROPERTIES:
-        return isSetProperties();
-      }
-      throw new IllegalStateException();
-    }
-
-    @Override
-    public boolean equals(Object that) {
-      if (that == null)
-        return false;
-      if (that instanceof createInterpreter_args)
-        return this.equals((createInterpreter_args)that);
-      return false;
-    }
-
-    public boolean equals(createInterpreter_args that) {
-      if (that == null)
-        return false;
-
-      boolean this_present_className = true && this.isSetClassName();
-      boolean that_present_className = true && that.isSetClassName();
-      if (this_present_className || that_present_className) {
-        if (!(this_present_className && that_present_className))
-          return false;
-        if (!this.className.equals(that.className))
-          return false;
-      }
-
-      boolean this_present_properties = true && this.isSetProperties();
-      boolean that_present_properties = true && that.isSetProperties();
-      if (this_present_properties || that_present_properties) {
-        if (!(this_present_properties && that_present_properties))
-          return false;
-        if (!this.properties.equals(that.properties))
-          return false;
-      }
-
-      return true;
-    }
-
-    @Override
-    public int hashCode() {
-      return 0;
-    }
-
-    public int compareTo(createInterpreter_args other) {
-      if (!getClass().equals(other.getClass())) {
-        return getClass().getName().compareTo(other.getClass().getName());
-      }
-
-      int lastComparison = 0;
-      createInterpreter_args typedOther = (createInterpreter_args)other;
-
-      lastComparison = Boolean.valueOf(isSetClassName()).compareTo(typedOther.isSetClassName());
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-      if (isSetClassName()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.className, typedOther.className);
-        if (lastComparison != 0) {
-          return lastComparison;
-        }
-      }
-      lastComparison = Boolean.valueOf(isSetProperties()).compareTo(typedOther.isSetProperties());
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-      if (isSetProperties()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.properties, typedOther.properties);
-        if (lastComparison != 0) {
-          return lastComparison;
-        }
-      }
-      return 0;
-    }
-
-    public _Fields fieldForId(int fieldId) {
-      return _Fields.findByThriftId(fieldId);
-    }
-
-    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
-    }
-
-    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
-    }
-
-    @Override
-    public String toString() {
-      StringBuilder sb = new StringBuilder("createInterpreter_args(");
-      boolean first = true;
-
-      sb.append("className:");
-      if (this.className == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.className);
-      }
-      first = false;
-      if (!first) sb.append(", ");
-      sb.append("properties:");
-      if (this.properties == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.properties);
-      }
-      first = false;
-      sb.append(")");
-      return sb.toString();
-    }
-
-    public void validate() throws org.apache.thrift.TException {
-      // check for required fields
-      // check for sub-struct validity
-    }
-
-    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-      try {
-        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
-      }
-    }
-
-    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
-      try {
-        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
-      }
-    }
-
-    private static class createInterpreter_argsStandardSchemeFactory implements SchemeFactory {
-      public createInterpreter_argsStandardScheme getScheme() {
-        return new createInterpreter_argsStandardScheme();
-      }
-    }
-
-    private static class createInterpreter_argsStandardScheme extends StandardScheme<createInterpreter_args> {
-
-      public void read(org.apache.thrift.protocol.TProtocol iprot, createInterpreter_args struct) throws org.apache.thrift.TException {
-        org.apache.thrift.protocol.TField schemeField;
-        iprot.readStructBegin();
-        while (true)
-        {
-          schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-            break;
-          }
-          switch (schemeField.id) {
-            case 1: // CLASS_NAME
-              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-                struct.className = iprot.readString();
-                struct.setClassNameIsSet(true);
-              } else { 
-                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-              }
-              break;
-            case 2: // PROPERTIES
-              if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
-                {
-                  org.apache.thrift.protocol.TMap _map0 = iprot.readMapBegin();
-                  struct.properties = new HashMap<String,String>(2*_map0.size);
-                  for (int _i1 = 0; _i1 < _map0.size; ++_i1)
-                  {
-                    String _key2; // required
-                    String _val3; // required
-                    _key2 = iprot.readString();
-                    _val3 = iprot.readString();
-                    struct.properties.put(_key2, _val3);
-                  }
-                  iprot.readMapEnd();
-                }
-                struct.setPropertiesIsSet(true);
-              } else { 
-                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-              }
-              break;
-            default:
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-          }
-          iprot.readFieldEnd();
-        }
-        iprot.readStructEnd();
-
-        // check for required fields of primitive type, which can't be checked in the validate method
-        struct.validate();
-      }
-
-      public void write(org.apache.thrift.protocol.TProtocol oprot, createInterpreter_args struct) throws org.apache.thrift.TException {
-        struct.validate();
-
-        oprot.writeStructBegin(STRUCT_DESC);
-        if (struct.className != null) {
-          oprot.writeFieldBegin(CLASS_NAME_FIELD_DESC);
-          oprot.writeString(struct.className);
-          oprot.writeFieldEnd();
-        }
-        if (struct.properties != null) {
-          oprot.writeFieldBegin(PROPERTIES_FIELD_DESC);
-          {
-            oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.properties.size()));
-            for (Map.Entry<String, String> _iter4 : struct.properties.entrySet())
-            {
-              oprot.writeString(_iter4.getKey());
-              oprot.writeString(_iter4.getValue());
-            }
-            oprot.writeMapEnd();
-          }
-          oprot.writeFieldEnd();
-        }
-        oprot.writeFieldStop();
-        oprot.writeStructEnd();
-      }
-
-    }
-
-    private static class createInterpreter_argsTupleSchemeFactory implements SchemeFactory {
-      public createInterpreter_argsTupleScheme getScheme() {
-        return new createInterpreter_argsTupleScheme();
-      }
-    }
-
-    private static class createInterpreter_argsTupleScheme extends TupleScheme<createInterpreter_args> {
-
-      @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, createInterpreter_args struct) throws org.apache.thrift.TException {
-        TTupleProtocol oprot = (TTupleProtocol) prot;
-        BitSet optionals = new BitSet();
-        if (struct.isSetClassName()) {
-          optionals.set(0);
-        }
-        if (struct.isSetProperties()) {
-          optionals.set(1);
-        }
-        oprot.writeBitSet(optionals, 2);
-        if (struct.isSetClassName()) {
-          oprot.writeString(struct.className);
-        }
-        if (struct.isSetProperties()) {
-          {
-            oprot.writeI32(struct.properties.size());
-            for (Map.Entry<String, String> _iter5 : struct.properties.entrySet())
-            {
-              oprot.writeString(_iter5.getKey());
-              oprot.writeString(_iter5.getValue());
-            }
-          }
-        }
-      }
-
-      @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, createInterpreter_args struct) throws org.apache.thrift.TException {
-        TTupleProtocol iprot = (TTupleProtocol) prot;
-        BitSet incoming = iprot.readBitSet(2);
-        if (incoming.get(0)) {
-          struct.className = iprot.readString();
-          struct.setClassNameIsSet(true);
-        }
-        if (incoming.get(1)) {
-          {
-            org.apache.thrift.protocol.TMap _map6 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
-            struct.properties = new HashMap<String,String>(2*_map6.size);
-            for (int _i7 = 0; _i7 < _map6.size; ++_i7)
-            {
-              String _key8; // required
-              String _val9; // required
-              _key8 = iprot.readString();
-              _val9 = iprot.readString();
-              struct.properties.put(_key8, _val9);
-            }
-          }
-          struct.setPropertiesIsSet(true);
-        }
-      }
-    }
-
-  }
-
-  public static class createInterpreter_result implements org.apache.thrift.TBase<createInterpreter_result, createInterpreter_result._Fields>, java.io.Serializable, Cloneable   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("createInterpreter_result");
-
-
-    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
-    static {
-      schemes.put(StandardScheme.class, new createInterpreter_resultStandardSchemeFactory());
-      schemes.put(TupleScheme.class, new createInterpreter_resultTupleSchemeFactory());
-    }
-
-
-    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-;
-
-      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
-      static {
-        for (_Fields field : EnumSet.allOf(_Fields.class)) {
-          byName.put(field.getFieldName(), field);
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, or null if its not found.
-       */
-      public static _Fields findByThriftId(int fieldId) {
-        switch(fieldId) {
-          default:
-            return null;
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, throwing an exception
-       * if it is not found.
-       */
-      public static _Fields findByThriftIdOrThrow(int fieldId) {
-        _Fields fields = findByThriftId(fieldId);
-        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-        return fields;
-      }
-
-      /**
-       * Find the _Fields constant that matches name, or null if its not found.
-       */
-      public static _Fields findByName(String name) {
-        return byName.get(name);
-      }
-
-      private final short _thriftId;
-      private final String _fieldName;
-
-      _Fields(short thriftId, String fieldName) {
-        _thriftId = thriftId;
-        _fieldName = fieldName;
-      }
-
-      public short getThriftFieldId() {
-        return _thriftId;
-      }
-
-      public String getFieldName() {
-        return _fieldName;
-      }
-    }
-    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-    static {
-      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      metaDataMap = Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(createInterpreter_result.class, metaDataMap);
-    }
-
-    public createInterpreter_result() {
-    }
-
-    /**
-     * Performs a deep copy on <i>other</i>.
-     */
-    public createInterpreter_result(createInterpreter_result other) {
-    }
-
-    public createInterpreter_result deepCopy() {
-      return new createInterpreter_result(this);
-    }
-
-    @Override
-    public void clear() {
-    }
-
-    public void setFieldValue(_Fields field, Object value) {
-      switch (field) {
-      }
-    }
-
-    public Object getFieldValue(_Fields field) {
-      switch (field) {
-      }
-      throw new IllegalStateException();
-    }
-
-    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-    public boolean isSet(_Fields field) {
-      if (field == null) {
-        throw new IllegalArgumentException();
-      }
-
-      switch (field) {
-      }
-      throw new IllegalStateException();
-    }
-
-    @Override
-    public boolean equals(Object that) {
-      if (that == null)
-        return false;
-      if (that instanceof createInterpreter_result)
-        return this.equals((createInterpreter_result)that);
-      return false;
-    }
-
-    public boolean equals(createInterpreter_result that) {
-      if (that == null)
-        return false;
-
-      return true;
-    }
-
-    @Override
-    public int hashCode() {
-      return 0;
-    }
-
-    public int compareTo(createInterpreter_result other) {
-      if (!getClass().equals(other.getClass())) {
-        return getClass().getName().compareTo(other.getClass().getName());
-      }
-
-      int lastComparison = 0;
-      createInterpreter_result typedOther = (createInterpreter_result)other;
-
-      return 0;
-    }
-
-    public _Fields fieldForId(int fieldId) {
-      return _Fields.findByThriftId(fieldId);
-    }
-
-    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
-    }
-
-    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
-      }
-
-    @Override
-    public String toString() {
-      StringBuilder sb = new StringBuilder("createInterpreter_result(");
-      boolean first = true;
-
-      sb.append(")");
-      return sb.toString();
-    }
-
-    public void validate() throws org.apache.thrift.TException {
-      // check for required fields
-      // check for sub-struct validity
-    }
-
-    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-      try {
-        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
-      }
-    }
-
-    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
-      try {
-        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
-      }
-    }
-
-    private static class createInterpreter_resultStandardSchemeFactory implements SchemeFactory {
-      public createInterpreter_resultStandardScheme getScheme() {
-        return new createInterpreter_resultStandardScheme();
-      }
-    }
-
-    private static class createInterpreter_resultStandardScheme extends StandardScheme<createInterpreter_result> {
-
-      public void read(org.apache.thrift.protocol.TProtocol iprot, createInterpreter_result struct) throws org.apache.thrift.TException {
-        org.apache.thrift.protocol.TField schemeField;
-        iprot.readStructBegin();
-        while (true)
-        {
-          schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-            break;
-          }
-          switch (schemeField.id) {
-            default:
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-          }
-          iprot.readFieldEnd();
-        }
-        iprot.readStructEnd();
-
-        // check for required fields of primitive type, which can't be checked in the validate method
-        struct.validate();
-      }
-
-      public void write(org.apache.thrift.protocol.TProtocol oprot, createInterpreter_result struct) throws org.apache.thrift.TException {
-        struct.validate();
-
-        oprot.writeStructBegin(STRUCT_DESC);
-        oprot.writeFieldStop();
-        oprot.writeStructEnd();
-      }
-
-    }
-
-    private static class createInterpreter_resultTupleSchemeFactory implements SchemeFactory {
-      public createInterpreter_resultTupleScheme getScheme() {
-        return new createInterpreter_resultTupleScheme();
-      }
-    }
-
-    private static class createInterpreter_resultTupleScheme extends TupleScheme<createInterpreter_result> {
-
-      @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, createInterpreter_result struct) throws org.apache.thrift.TException {
-        TTupleProtocol oprot = (TTupleProtocol) prot;
-      }
-
-      @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, createInterpreter_result struct) throws org.apache.thrift.TException {
-        TTupleProtocol iprot = (TTupleProtocol) prot;
-      }
-    }
-
-  }
-
-  public static class open_args implements org.apache.thrift.TBase<open_args, open_args._Fields>, java.io.Serializable, Cloneable   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("open_args");
-
-    private static final org.apache.thrift.protocol.TField CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("className", org.apache.thrift.protocol.TType.STRING, (short)1);
-
-    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
-    static {
-      schemes.put(StandardScheme.class, new open_argsStandardSchemeFactory());
-      schemes.put(TupleScheme.class, new open_argsTupleSchemeFactory());
-    }
-
-    public String className; // required
-
-    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-      CLASS_NAME((short)1, "className");
-
-      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
-      static {
-        for (_Fields field : EnumSet.allOf(_Fields.class)) {
-          byName.put(field.getFieldName(), field);
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, or null if its not found.
-       */
-      public static _Fields findByThriftId(int fieldId) {
-        switch(fieldId) {
-          case 1: // CLASS_NAME
-            return CLASS_NAME;
-          default:
-            return null;
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, throwing an exception
-       * if it is not found.
-       */
-      public static _Fields findByThriftIdOrThrow(int fieldId) {
-        _Fields fields = findByThriftId(fieldId);
-        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-        return fields;
-      }
-
-      /**
-       * Find the _Fields constant that matches name, or null if its not found.
-       */
-      public static _Fields findByName(String name) {
-        return byName.get(name);
-      }
-
-      private final short _thriftId;
-      private final String _fieldName;
-
-      _Fields(short thriftId, String fieldName) {
-        _thriftId = thriftId;
-        _fieldName = fieldName;
-      }
-
-      public short getThriftFieldId() {
-        return _thriftId;
-      }
-
-      public String getFieldName() {
-        return _fieldName;
-      }
-    }
-
-    // isset id assignments
-    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-    static {
-      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      tmpMap.put(_Fields.CLASS_NAME, new org.apache.thrift.meta_data.FieldMetaData("className", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-      metaDataMap = Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(open_args.class, metaDataMap);
-    }
-
-    public open_args() {
-    }
-
-    public open_args(
-      String className)
-    {
-      this();
-      this.className = className;
-    }
-
-    /**
-     * Performs a deep copy on <i>other</i>.
-     */
-    public open_args(open_args other) {
-      if (other.isSetClassName()) {
-        this.className = other.className;
-      }
-    }
-
-    public open_args deepCopy() {
-      return new open_args(this);
-    }
-
-    @Override
-    public void clear() {
-      this.className = null;
-    }
-
-    public String getClassName() {
-      return this.className;
-    }
-
-    public open_args setClassName(String className) {
-      this.className = className;
-      return this;
-    }
-
-    public void unsetClassName() {
-      this.className = null;
-    }
-
-    /** Returns true if field className is set (has been assigned a value) and false otherwise */
-    public boolean isSetClassName() {
-      return this.className != null;
-    }
-
-    public void setClassNameIsSet(boolean value) {
-      if (!value) {
-        this.className = null;
-      }
-    }
-
-    public void setFieldValue(_Fields field, Object value) {
-      switch (field) {
-      case CLASS_NAME:
-        if (value == null) {
-          unsetClassName();
-        } else {
-          setClassName((String)value);
-        }
-        break;
-
-      }
-    }
-
-    public Object getFieldValue(_Fields field) {
-      switch (field) {
-      case CLASS_NAME:
-        return getClassName();
-
-      }
-      throw new IllegalStateException();
-    }
-
-    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-    public boolean isSet(_Fields field) {
-      if (field == null) {
-        throw new IllegalArgumentException();
-      }
-
-      switch (field) {
-      case CLASS_NAME:
-        return isSetClassName();
-      }
-      throw new IllegalStateException();
-    }
-
-    @Override
-    public boolean equals(Object that) {
-      if (that == null)
-        return false;
-      if (that instanceof open_args)
-        return this.equals((open_args)that);
-      return false;
-    }
-
-    public boolean equals(open_args that) {
-      if (that == null)
-        return false;
-
-      boolean this_present_className = true && this.isSetClassName();
-      boolean that_present_className = true && that.isSetClassName();
-      if (this_present_className || that_present_className) {
-        if (!(this_present_className && that_present_className))
-          return false;
-        if (!this.className.equals(that.className))
-          return false;
-      }
-
-      return true;
-    }
-
-    @Override
-    public int hashCode() {
-      return 0;
-    }
-
-    public int compareTo(open_args other) {
-      if (!getClass().equals(other.getClass())) {
-        return getClass().getName().compareTo(other.getClass().getName());
-      }
-
-      int lastComparison = 0;
-      open_args typedOther = (open_args)other;
-
-      lastComparison = Boolean.valueOf(isSetClassName()).compareTo(typedOther.isSetClassName());
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-      if (isSetClassName()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.className, typedOther.className);
-        if (lastComparison != 0) {
-          return lastComparison;
-        }
-      }
-      return 0;
-    }
-
-    public _Fields fieldForId(int fieldId) {
-      return _Fields.findByThriftId(fieldId);
-    }
-
-    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
-    }
-
-    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
-    }
-
-    @Override
-    public String toString() {
-      StringBuilder sb = new StringBuilder("open_args(");
-      boolean first = true;
-
-      sb.append("className:");
-      if (this.className == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.className);
-      }
-      first = false;
-      sb.append(")");
-      return sb.toString();
-    }
-
-    public void validate() throws org.apache.thrift.TException {
-      // check for required fields
-      // check for sub-struct validity
-    }
-
-    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-      try {
-        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
-      }
-    }
-
-    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
-      try {
-        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
-      }
-    }
-
-    private static class open_argsStandardSchemeFactory implements SchemeFactory {
-      public open_argsStandardScheme getScheme() {
-        return new open_argsStandardScheme();
-      }
-    }
-
-    private static class open_argsStandardScheme extends StandardScheme<open_args> {
-
-      public void read(org.apache.thrift.protocol.TProtocol iprot, open_args struct) throws org.apache.thrift.TException {
-        org.apache.thrift.protocol.TField schemeField;
-        iprot.readStructBegin();
-        while (true)
-        {
-          schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-            break;
-          }
-          switch (schemeField.id) {
-            case 1: // CLASS_NAME
-              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-                struct.className = iprot.readString();
-                struct.setClassNameIsSet(true);
-              } else { 
-                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-              }
-              break;
-            default:
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-          }
-          iprot.readFieldEnd();
-        }
-        iprot.readStructEnd();
-
-        // check for required fields of primitive type, which can't be checked in the validate method
-        struct.validate();
-      }
-
-      public void write(org.apache.thrift.protocol.TProtocol oprot, open_args struct) throws org.apache.thrift.TException {
-        struct.validate();
-
-        oprot.writeStructBegin(STRUCT_DESC);
-        if (struct.className != null) {
-          oprot.writeFieldBegin(CLASS_NAME_FIELD_DESC);
-          oprot.writeString(struct.className);
-          oprot.writeFieldEnd();
-        }
-        oprot.writeFieldStop();
-        oprot.writeStructEnd();
-      }
-
-    }
-
-    private static class open_argsTupleSchemeFactory implements SchemeFactory {
-      public open_argsTupleScheme getScheme() {
-        return new open_argsTupleScheme();
-      }
-    }
-
-    private static class open_argsTupleScheme extends TupleScheme<open_args> {
-
-      @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, open_args struct) throws org.apache.thrift.TException {
-        TTupleProtocol oprot = (TTupleProtocol) prot;
-        BitSet optionals = new BitSet();
-        if (struct.isSetClassName()) {
-          optionals.set(0);
-        }
-        oprot.writeBitSet(optionals, 1);
-        if (struct.isSetClassName()) {
-          oprot.writeString(struct.className);
-        }
-      }
-
-      @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, open_args struct) throws org.apache.thrift.TException {
-        TTupleProtocol iprot = (TTupleProtocol) prot;
-        BitSet incoming = iprot.readBitSet(1);
-        if (incoming.get(0)) {
-          struct.className = iprot.readString();
-          struct.setClassNameIsSet(true);
-        }
-      }
-    }
-
-  }
-
-  public static class open_result implements org.apache.thrift.TBase<open_result, open_result._Fields>, java.io.Serializable, Cloneable   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("open_result");
-
-
-    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
-    static {
-      schemes.put(StandardScheme.class, new open_resultStandardSchemeFactory());
-      schemes.put(TupleScheme.class, new open_resultTupleSchemeFactory());
-    }
-
-
-    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-;
-
-      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
-      static {
-        for (_Fields field : EnumSet.allOf(_Fields.class)) {
-          byName.put(field.getFieldName(), field);
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, or null if its not found.
-       */
-      public static _Fields findByThriftId(int fieldId) {
-        switch(fieldId) {
-          default:
-            return null;
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, throwing an exception
-       * if it is not found.
-       */
-      public static _Fields findByThriftIdOrThrow(int fieldId) {
-        _Fields fields = findByThriftId(fieldId);
-        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-        return fields;
-      }
-
-      /**
-       * Find the _Fields constant that matches name, or null if its not found.
-       */
-      public static _Fields findByName(String name) {
-        return byName.get(name);
-      }
-
-      private final short _thriftId;
-      private final String _fieldName;
-
-      _Fields(short thriftId, String fieldName) {
-        _thriftId = thriftId;
-        _fieldName = fieldName;
-      }
-
-      public short getThriftFieldId() {
-        return _thriftId;
-      }
-
-      public String getFieldName() {
-        return _fieldName;
-      }
-    }
-    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-    static {
-      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      metaDataMap = Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(open_result.class, metaDataMap);
-    }
-
-    public open_result() {
-    }
-
-    /**
-     * Performs a deep copy on <i>other</i>.
-     */
-    public open_result(open_result other) {
-    }
-
-    public open_result deepCopy() {
-      return new open_result(this);
-    }
-
-    @Override
-    public void clear() {
-    }
-
-    public void setFieldValue(_Fields field, Object value) {
-      switch (field) {
-      }
-    }
-
-    public Object getFieldValue(_Fields field) {
-      switch (field) {
-      }
-      throw new IllegalStateException();
-    }
-
-    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-    public boolean isSet(_Fields field) {
-      if (field == null) {
-        throw new IllegalArgumentException();
-      }
-
-      switch (field) {
-      }
-      throw new IllegalStateException();
-    }
-
-    @Override
-    public boolean equals(Object that) {
-      if (that == null)
-        return false;
-      if (that instanceof open_result)
-        return this.equals((open_result)that);
-      return false;
-    }
-
-    public boolean equals(open_result that) {
-      if (that == null)
-        return false;
-
-      return true;
-    }
-
-    @Override
-    public int hashCode() {
-      return 0;
-    }
-
-    public int compareTo(open_result other) {
-      if (!getClass().equals(other.getClass())) {
-        return getClass().getName().compareTo(other.getClass().getName());
-      }
-
-      int lastComparison = 0;
-      open_result typedOther = (open_result)other;
-
-      return 0;
-    }
-
-    public _Fields fieldForId(int fieldId) {
-      return _Fields.findByThriftId(fieldId);
-    }
-
-    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
-    }
-
-    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
-      }
-
-    @Override
-    public String toString() {
-      StringBuilder sb = new StringBuilder("open_result(");
-      boolean first = true;
-
-      sb.append(")");
-      return sb.toString();
-    }
-
-    public void validate() throws org.apache.thrift.TException {
-      // check for required fields
-      // check for sub-struct validity
-    }
-
-    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-      try {
-        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
-      }
-    }
-
-    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
-      try {
-        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
-      }
-    }
-
-    private static class open_resultStandardSchemeFactory implements SchemeFactory {
-      public open_resultStandardScheme getScheme() {
-        return new open_resultStandardScheme();
-      }
-    }
-
-    private static class open_resultStandardScheme extends StandardScheme<open_result> {
-
-      public void read(org.apache.thrift.protocol.TProtocol iprot, open_result struct) throws org.apache.thrift.TException {
-        org.apache.thrift.protocol.TField schemeField;
-        iprot.readStructBegin();
-        while (true)
-        {
-          schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-            break;
-          }
-          switch (schemeField.id) {
-            default:
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-          }
-          iprot.readFieldEnd();
-        }
-        iprot.readStructEnd();
-
-        // check for required fields of primitive type, which can't be checked in the validate method
-        struct.validate();
-      }
-
-      public void write(org.apache.thrift.protocol.TProtocol oprot, open_result struct) throws org.apache.thrift.TException {
-        struct.validate();
-
-        oprot.writeStructBegin(STRUCT_DESC);
-        oprot.writeFieldStop();
-        oprot.writeStructEnd();
-      }
-
-    }
-
-    private static class open_resultTupleSchemeFactory implements SchemeFactory {
-      public open_resultTupleScheme getScheme() {
-        return new open_resultTupleScheme();
-      }
-    }
-
-    private static class open_resultTupleScheme extends TupleScheme<open_result> {
-
-      @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, open_result struct) throws org.apache.thrift.TException {
-        TTupleProtocol oprot = (TTupleProtocol) prot;
-      }
-
-      @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, open_result struct) throws org.apache.thrift.TException {
-        TTupleProtocol iprot = (TTupleProtocol) prot;
-      }
-    }
-
-  }
-
-  public static class close_args implements org.apache.thrift.TBase<close_args, close_args._Fields>, java.io.Serializable, Cloneable   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("close_args");
-
-    private static final org.apache.thrift.protocol.TField CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("className", org.apache.thrift.protocol.TType.STRING, (short)1);
-
-    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
-    static {
-      schemes.put(StandardScheme.class, new close_argsStandardSchemeFactory());
-      schemes.put(TupleScheme.class, new close_argsTupleSchemeFactory());
-    }
-
-    public String className; // requi

<TRUNCATED>
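
For reference, the generated Thrift structs above all follow the same pattern: a _Fields enum, field metadata, and a standard scheme plus a tuple scheme for (de)serialization. A minimal round-trip sketch for open_args, assuming the enclosing generated service class keeps the name RemoteInterpreterService under the renamed org.apache.zeppelin.interpreter.thrift package (inferred from imports elsewhere in this commit) and using a placeholder class name:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.thrift.protocol.TCompactProtocol;
    import org.apache.thrift.transport.TIOStreamTransport;
    import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.open_args;

    public class OpenArgsRoundTripSketch {
      public static void main(String[] args) throws Exception {
        // placeholder interpreter class name, not a real Zeppelin class
        open_args out = new open_args("com.example.MyInterpreter");

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        out.write(new TCompactProtocol(new TIOStreamTransport(buffer)));  // standard scheme

        open_args in = new open_args();
        in.read(new TCompactProtocol(new TIOStreamTransport(
            new ByteArrayInputStream(buffer.toByteArray()))));
        System.out.println(in.getClassName());  // com.example.MyInterpreter
      }
    }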

[10/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/FIFOScheduler.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/FIFOScheduler.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/FIFOScheduler.java
deleted file mode 100644
index 078cd3c..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/FIFOScheduler.java
+++ /dev/null
@@ -1,134 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-
-import com.nflabs.zeppelin.scheduler.Job.Status;
-
-/**
- * TODO(moon) : add description.
- *
- * @author Leemoonsoo
- *
- */
-public class FIFOScheduler implements Scheduler {
-  List<Job> queue = new LinkedList<Job>();
-  private ExecutorService executor;
-  private SchedulerListener listener;
-  boolean terminate = false;
-  Job runningJob = null;
-  private String name;
-
-  public FIFOScheduler(String name, ExecutorService executor, SchedulerListener listener) {
-    this.name = name;
-    this.executor = executor;
-    this.listener = listener;
-  }
-
-  @Override
-  public String getName() {
-    return name;
-  }
-
-  @Override
-  public Collection<Job> getJobsWaiting() {
-    List<Job> ret = new LinkedList<Job>();
-    synchronized (queue) {
-      for (Job job : queue) {
-        ret.add(job);
-      }
-    }
-    return ret;
-  }
-
-  @Override
-  public Collection<Job> getJobsRunning() {
-    List<Job> ret = new LinkedList<Job>();
-    Job job = runningJob;
-
-    if (job != null) {
-      ret.add(job);
-    }
-
-    return ret;
-  }
-
-
-
-  @Override
-  public void submit(Job job) {
-    job.setStatus(Status.PENDING);
-    synchronized (queue) {
-      queue.add(job);
-      queue.notify();
-    }
-  }
-
-  @Override
-  public void run() {
-
-    synchronized (queue) {
-      while (terminate == false) {
-        if (runningJob != null || queue.isEmpty() == true) {
-          try {
-            queue.wait(500);
-          } catch (InterruptedException e) {
-          }
-          continue;
-        }
-
-        runningJob = queue.remove(0);
-
-        final Scheduler scheduler = this;
-        this.executor.execute(new Runnable() {
-          @Override
-          public void run() {
-            if (runningJob.isAborted()) {
-              runningJob.setStatus(Status.ABORT);
-              runningJob.aborted = false;
-              synchronized (queue) {
-                queue.notify();
-              }
-              return;
-            }
-
-            runningJob.setStatus(Status.RUNNING);
-            if (listener != null) {
-              listener.jobStarted(scheduler, runningJob);
-            }
-            runningJob.run();
-            if (runningJob.isAborted()) {
-              runningJob.setStatus(Status.ABORT);
-            } else {
-              if (runningJob.getException() != null) {
-                runningJob.setStatus(Status.ERROR);
-              } else {
-                runningJob.setStatus(Status.FINISHED);
-              }
-            }
-            if (listener != null) {
-              listener.jobFinished(scheduler, runningJob);
-            }
-            // reset aborted flag to allow retry
-            runningJob.aborted = false;
-            runningJob = null;
-            synchronized (queue) {
-              queue.notify();
-            }
-          }
-        });
-      }
-    }
-  }
-
-  @Override
-  public void stop() {
-    terminate = true;
-    synchronized (queue) {
-      queue.notify();
-    }
-  }
-
-}
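
FIFOScheduler runs at most one job at a time: submit() marks the job PENDING and notifies the loop in run(), which hands the head of the queue to the executor and waits until runningJob is cleared before dispatching the next one. A minimal usage sketch, assuming the package becomes org.apache.zeppelin.scheduler after this commit's rename:

    import java.util.Map;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import org.apache.zeppelin.scheduler.FIFOScheduler;
    import org.apache.zeppelin.scheduler.Job;

    public class FifoSchedulerSketch {
      public static void main(String[] args) throws Exception {
        // two threads: one for the scheduler loop itself, one for the running job
        ExecutorService pool = Executors.newFixedThreadPool(2);
        FIFOScheduler scheduler = new FIFOScheduler("fifo-sketch", pool, null);
        pool.execute(scheduler);  // Scheduler extends Runnable; this starts the dispatch loop

        Job job = new Job("hello-job", null) {
          public int progress() { return 0; }
          public Map<String, Object> info() { return null; }
          protected Object jobRun() throws Throwable { return "hello"; }
          protected boolean jobAbort() { return false; }
        };

        scheduler.submit(job);         // status goes PENDING; the loop picks it up
        while (!job.isTerminated()) {  // FINISHED, ERROR or ABORT
          Thread.sleep(100);
        }
        System.out.println(job.getStatus() + ": " + job.getReturn());  // FINISHED: hello
        scheduler.stop();
        pool.shutdownNow();
      }
    }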

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/Job.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/Job.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/Job.java
deleted file mode 100644
index 29f72b5..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/Job.java
+++ /dev/null
@@ -1,246 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Map;
-
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * Skeletal implementation of the Job concept.
- *  - designed for inheritance
- *  - should be run on a separate thread
- *  - maintains internal state: its status
- *  - supports listeners who are updated on status change
- *
- *  The Job class is serialized/deserialized and used for server<->client communication
- *  and for saving/loading jobs from disk.
- *  Changing, adding, or deleting a non-transient field therefore needs to take that into account.
- *
- *  @author Leemoonsoo
- */
-public abstract class Job {
-  /**
-   * Job status.
-   *
-   * READY - Job is not running, ready to run.
-   * PENDING - Job is submitted to the scheduler but not running yet.
-   * RUNNING - Job is running.
-   * FINISHED - Job finished running with success.
-   * ERROR - Job finished running with an error.
-   * ABORT - Job was finished by an abort.
-   *
-   */
-  public static enum Status {
-    READY,
-    PENDING,
-    RUNNING,
-    FINISHED,
-    ERROR,
-    ABORT;
-    boolean isReady() {
-      return this == READY;
-    }
-
-    boolean isRunning() {
-      return this == RUNNING;
-    }
-
-    boolean isPending() {
-      return this == PENDING;
-    }
-  }
-
-  private String jobName;
-  String id;
-  Object result;
-  Date dateCreated;
-  Date dateStarted;
-  Date dateFinished;
-  Status status;
-
-  transient boolean aborted = false;
-
-  String errorMessage;
-  private transient Throwable exception;
-  private transient JobListener listener;
-  private long progressUpdateIntervalMs;
-
-  public Job(String jobName, JobListener listener, long progressUpdateIntervalMs) {
-    this.jobName = jobName;
-    this.listener = listener;
-    this.progressUpdateIntervalMs = progressUpdateIntervalMs;
-
-    dateCreated = new Date();
-    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd-HHmmss");
-    id = dateFormat.format(dateCreated) + "_" + super.hashCode();
-
-    setStatus(Status.READY);
-  }
-
-  public Job(String jobName, JobListener listener) {
-    this(jobName, listener, JobProgressPoller.DEFAULT_INTERVAL_MSEC);
-  }
-
-  public Job(String jobId, String jobName, JobListener listener, long progressUpdateIntervalMs) {
-    this.jobName = jobName;
-    this.listener = listener;
-    this.progressUpdateIntervalMs = progressUpdateIntervalMs;
-
-    id = jobId;
-
-    setStatus(Status.READY);
-  }
-
-  public String getId() {
-    return id;
-  }
-
-  @Override
-  public int hashCode() {
-    return id.hashCode();
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    return ((Job) o).hashCode() == hashCode();
-  }
-
-  public Status getStatus() {
-    return status;
-  }
-
-  public void setStatus(Status status) {
-    if (this.status == status) {
-      return;
-    }
-    Status before = this.status;
-    Status after = status;
-    if (listener != null) {
-      listener.beforeStatusChange(this, before, after);
-    }
-    this.status = status;
-    if (listener != null) {
-      listener.afterStatusChange(this, before, after);
-    }
-  }
-
-  public void setListener(JobListener listener) {
-    this.listener = listener;
-  }
-
-  public JobListener getListener() {
-    return listener;
-  }
-
-  public boolean isTerminated() {
-    return !this.status.isReady() && !this.status.isRunning() && !this.status.isPending();
-  }
-
-  public boolean isRunning() {
-    return this.status.isRunning();
-  }
-
-  public void run() {
-    JobProgressPoller progressUpdator = null;
-    try {
-      progressUpdator = new JobProgressPoller(this, progressUpdateIntervalMs);
-      progressUpdator.start();
-      dateStarted = new Date();
-      result = jobRun();
-      this.exception = null;
-      errorMessage = null;
-      dateFinished = new Date();
-      progressUpdator.terminate();
-    } catch (NullPointerException e) {
-      logger().error("Job failed", e);
-      progressUpdator.terminate();
-      this.exception = e;
-      result = e.getMessage();
-      errorMessage = getStack(e);
-      dateFinished = new Date();
-    } catch (Throwable e) {
-      logger().error("Job failed", e);
-      progressUpdator.terminate();
-      this.exception = e;
-      result = e.getMessage();
-      errorMessage = getStack(e);
-      dateFinished = new Date();
-    } finally {
-      //aborted = false;
-    }
-  }
-
-  public String getStack(Throwable e) {
-    StackTraceElement[] stacks = e.getStackTrace();
-    if (stacks == null) {
-      return "";
-    }
-    String ss = "";
-    for (StackTraceElement s : stacks) {
-      ss += s.toString() + "\n";
-    }
-
-    return ss;
-  }
-
-  public Throwable getException() {
-    return exception;
-  }
-
-  protected void setException(Throwable t) {
-    exception = t;
-    errorMessage = getStack(t);
-  }
-
-  public Object getReturn() {
-    return result;
-  }
-
-  public String getJobName() {
-    return jobName;
-  }
-
-  public void setJobName(String jobName) {
-    this.jobName = jobName;
-  }
-
-  public abstract int progress();
-
-  public abstract Map<String, Object> info();
-
-  protected abstract Object jobRun() throws Throwable;
-
-  protected abstract boolean jobAbort();
-
-  public void abort() {
-    aborted = jobAbort();
-  }
-
-  public boolean isAborted() {
-    return aborted;
-  }
-
-  public Date getDateCreated() {
-    return dateCreated;
-  }
-
-  public Date getDateStarted() {
-    return dateStarted;
-  }
-
-  public Date getDateFinished() {
-    return dateFinished;
-  }
-
-  private Logger logger() {
-    return LoggerFactory.getLogger(Job.class);
-  }
-
-  protected void setResult(Object result) {
-    this.result = result;
-  }
-}
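
Concrete jobs extend Job and implement progress(), info(), jobRun() and jobAbort(); run() itself only records the result or the thrown exception, while status transitions (RUNNING, FINISHED, ERROR, ABORT) are driven by the scheduler through setStatus(). A small sketch of the error path, assuming the org.apache.zeppelin.scheduler package after the rename:

    import java.util.Map;
    import org.apache.zeppelin.scheduler.Job;

    public class JobLifecycleSketch {
      static class FailingJob extends Job {
        FailingJob() { super("failing-job", null); }  // no listener
        public int progress() { return 0; }
        public Map<String, Object> info() { return null; }
        protected Object jobRun() throws Throwable { throw new IllegalStateException("boom"); }
        protected boolean jobAbort() { return false; }
      }

      public static void main(String[] args) {
        FailingJob job = new FailingJob();
        System.out.println(job.getStatus());     // READY (set by the constructor)
        job.run();                               // run() catches the Throwable itself
        System.out.println(job.getException());  // java.lang.IllegalStateException: boom
        System.out.println(job.getReturn());     // "boom" (the exception message)
        System.out.println(job.getStatus());     // still READY: a scheduler would set ERROR here
      }
    }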

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/JobListener.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/JobListener.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/JobListener.java
deleted file mode 100644
index 0e573a1..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/JobListener.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-/**
- * TODO(moon) : add description.
- * 
- * @author Leemoonsoo
- *
- */
-public interface JobListener {
-  public void onProgressUpdate(Job job, int progress);
-
-  public void beforeStatusChange(Job job, Job.Status before, Job.Status after);
-
-  public void afterStatusChange(Job job, Job.Status before, Job.Status after);
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/JobProgressPoller.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/JobProgressPoller.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/JobProgressPoller.java
deleted file mode 100644
index 142842a..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/JobProgressPoller.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * TODO(moon) : add description.
- *
- * @author Leemoonsoo
- *
- */
-public class JobProgressPoller extends Thread {
-  public static final long DEFAULT_INTERVAL_MSEC = 500;
-  Logger logger = LoggerFactory.getLogger(JobProgressPoller.class);
-  private Job job;
-  private long intervalMs;
-  boolean terminate = false;
-
-  public JobProgressPoller(Job job, long intervalMs) {
-    this.job = job;
-    this.intervalMs = intervalMs;
-  }
-
-  @Override
-  public void run() {
-    if (intervalMs < 0) {
-      return;
-    } else if (intervalMs == 0) {
-      intervalMs = DEFAULT_INTERVAL_MSEC;
-    }
-
-    while (terminate == false) {
-      JobListener listener = job.getListener();
-      if (listener != null) {
-        try {
-          if (job.isRunning()) {
-            listener.onProgressUpdate(job, job.progress());
-          }
-        } catch (Exception e) {
-          logger.error("Can not get or update progress", e);
-        }
-      }
-      try {
-        Thread.sleep(intervalMs);
-      } catch (InterruptedException e) {
-      }
-    }
-  }
-
-  public void terminate() {
-    terminate = true;
-  }
-}
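
JobProgressPoller simply calls the job's listener with job.progress() at a fixed interval while the job is running; the listener is the integration point. A minimal JobListener sketch (package assumed to be org.apache.zeppelin.scheduler after the rename):

    import org.apache.zeppelin.scheduler.Job;
    import org.apache.zeppelin.scheduler.JobListener;

    public class LoggingJobListener implements JobListener {
      public void onProgressUpdate(Job job, int progress) {
        // called by JobProgressPoller roughly every intervalMs while job.isRunning()
        System.out.println(job.getJobName() + " progress: " + progress + "%");
      }
      public void beforeStatusChange(Job job, Job.Status before, Job.Status after) { }
      public void afterStatusChange(Job job, Job.Status before, Job.Status after) {
        // called from Job.setStatus() whenever the status actually changes
        System.out.println(job.getJobName() + ": " + before + " -> " + after);
      }
    }

A Job subclass constructed with this listener (for example a hypothetical new MyJob("name", new LoggingJobListener())) gets progress callbacks from the poller that Job.run() starts, and status callbacks from every setStatus() call.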

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/ParallelScheduler.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/ParallelScheduler.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/ParallelScheduler.java
deleted file mode 100644
index d28e125..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/ParallelScheduler.java
+++ /dev/null
@@ -1,162 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-
-import com.nflabs.zeppelin.scheduler.Job.Status;
-
-/**
- * TODO(moon) : add description.
- *
- * @author Leemoonsoo
- *
- */
-public class ParallelScheduler implements Scheduler {
-  List<Job> queue = new LinkedList<Job>();
-  List<Job> running = new LinkedList<Job>();
-  private ExecutorService executor;
-  private SchedulerListener listener;
-  boolean terminate = false;
-  private String name;
-  private int maxConcurrency;
-
-  public ParallelScheduler(String name, ExecutorService executor, SchedulerListener listener,
-      int maxConcurrency) {
-    this.name = name;
-    this.executor = executor;
-    this.listener = listener;
-    this.maxConcurrency = maxConcurrency;
-  }
-
-  @Override
-  public String getName() {
-    return name;
-  }
-
-  @Override
-  public Collection<Job> getJobsWaiting() {
-    List<Job> ret = new LinkedList<Job>();
-    synchronized (queue) {
-      for (Job job : queue) {
-        ret.add(job);
-      }
-    }
-    return ret;
-  }
-
-  @Override
-  public Collection<Job> getJobsRunning() {
-    List<Job> ret = new LinkedList<Job>();
-    synchronized (queue) {
-      for (Job job : running) {
-        ret.add(job);
-      }
-    }
-    return ret;
-  }
-
-
-
-  @Override
-  public void submit(Job job) {
-    job.setStatus(Status.PENDING);
-    synchronized (queue) {
-      queue.add(job);
-      queue.notify();
-    }
-  }
-
-  @Override
-  public void run() {
-
-    synchronized (queue) {
-      while (terminate == false) {
-        if (running.size() >= maxConcurrency || queue.isEmpty() == true) {
-          try {
-            queue.wait(500);
-          } catch (InterruptedException e) {
-          }
-          continue;
-        }
-
-        Job job = queue.remove(0);
-        running.add(job);
-        Scheduler scheduler = this;
-
-        executor.execute(new JobRunner(scheduler, job));
-      }
-
-
-    }
-  }
-
-  public void setMaxConcurrency(int maxConcurrency) {
-    this.maxConcurrency = maxConcurrency;
-    synchronized (queue) {
-      queue.notify();
-    }
-  }
-
-  private class JobRunner implements Runnable {
-    private Scheduler scheduler;
-    private Job job;
-
-    public JobRunner(Scheduler scheduler, Job job) {
-      this.scheduler = scheduler;
-      this.job = job;
-    }
-
-    @Override
-    public void run() {
-      if (job.isAborted()) {
-        job.setStatus(Status.ABORT);
-        job.aborted = false;
-
-        synchronized (queue) {
-          running.remove(job);
-          queue.notify();
-        }
-
-        return;
-      }
-
-      job.setStatus(Status.RUNNING);
-      if (listener != null) {
-        listener.jobStarted(scheduler, job);
-      }
-      job.run();
-      if (job.isAborted()) {
-        job.setStatus(Status.ABORT);
-      } else {
-        if (job.getException() != null) {
-          job.setStatus(Status.ERROR);
-        } else {
-          job.setStatus(Status.FINISHED);
-        }
-      }
-
-      if (listener != null) {
-        listener.jobFinished(scheduler, job);
-      }
-
-      // reset aborted flag to allow retry
-      job.aborted = false;
-      synchronized (queue) {
-        running.remove(job);
-        queue.notify();
-      }
-    }
-  }
-
-
-  @Override
-  public void stop() {
-    terminate = true;
-    synchronized (queue) {
-      queue.notify();
-    }
-  }
-
-}
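
ParallelScheduler follows the same queue/notify pattern as FIFOScheduler but dispatches up to maxConcurrency jobs at once and tracks them in the running list; setMaxConcurrency() can change the limit at runtime. A usage sketch under the same package assumption as above:

    import java.util.Map;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import org.apache.zeppelin.scheduler.Job;
    import org.apache.zeppelin.scheduler.ParallelScheduler;

    public class ParallelSchedulerSketch {
      static Job sleepJob(final String name) {
        return new Job(name, null) {
          public int progress() { return 0; }
          public Map<String, Object> info() { return null; }
          protected Object jobRun() throws Throwable { Thread.sleep(1000); return name; }
          protected boolean jobAbort() { return true; }
        };
      }

      public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        ParallelScheduler scheduler = new ParallelScheduler("parallel-sketch", pool, null, 2);
        pool.execute(scheduler);
        for (int i = 0; i < 4; i++) {
          scheduler.submit(sleepJob("job-" + i));
        }
        Thread.sleep(500);
        // with maxConcurrency = 2, at most two jobs run at once; the rest stay PENDING
        System.out.println("running: " + scheduler.getJobsRunning().size()
            + ", waiting: " + scheduler.getJobsWaiting().size());
        scheduler.stop();
        pool.shutdownNow();
      }
    }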

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/RemoteScheduler.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/RemoteScheduler.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/RemoteScheduler.java
deleted file mode 100644
index 14baa9b..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/RemoteScheduler.java
+++ /dev/null
@@ -1,357 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-
-import org.apache.thrift.TException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.nflabs.zeppelin.interpreter.remote.RemoteInterpreterProcess;
-import com.nflabs.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
-import com.nflabs.zeppelin.scheduler.Job.Status;
-
-/**
- *
- */
-public class RemoteScheduler implements Scheduler {
-  Logger logger = LoggerFactory.getLogger(RemoteScheduler.class);
-
-  List<Job> queue = new LinkedList<Job>();
-  List<Job> running = new LinkedList<Job>();
-  private ExecutorService executor;
-  private SchedulerListener listener;
-  boolean terminate = false;
-  private String name;
-  private int maxConcurrency;
-  private RemoteInterpreterProcess interpreterProcess;
-
-  public RemoteScheduler(String name, ExecutorService executor,
-      RemoteInterpreterProcess interpreterProcess, SchedulerListener listener,
-      int maxConcurrency) {
-    this.name = name;
-    this.executor = executor;
-    this.listener = listener;
-    this.interpreterProcess = interpreterProcess;
-    this.maxConcurrency = maxConcurrency;
-  }
-
-  @Override
-  public void run() {
-    while (terminate == false) {
-      Job job = null;
-
-      synchronized (queue) {
-        if (running.size() >= maxConcurrency || queue.isEmpty() == true) {
-          try {
-            queue.wait(500);
-          } catch (InterruptedException e) {
-          }
-          continue;
-        }
-
-        job = queue.remove(0);
-        running.add(job);
-      }
-
-      // run
-      Scheduler scheduler = this;
-      JobRunner jobRunner = new JobRunner(scheduler, job);
-      executor.execute(jobRunner);
-
-      // wait until it is submitted to the remote
-      while (!jobRunner.isJobSubmittedInRemote()) {
-        synchronized (queue) {
-          try {
-            queue.wait(500);
-          } catch (InterruptedException e) {
-          }
-        }
-      }
-    }
-  }
-
-  @Override
-  public String getName() {
-    return name;
-  }
-
-  @Override
-  public Collection<Job> getJobsWaiting() {
-    List<Job> ret = new LinkedList<Job>();
-    synchronized (queue) {
-      for (Job job : queue) {
-        ret.add(job);
-      }
-    }
-    return ret;
-  }
-
-  @Override
-  public Collection<Job> getJobsRunning() {
-    List<Job> ret = new LinkedList<Job>();
-    synchronized (queue) {
-      for (Job job : running) {
-        ret.add(job);
-      }
-    }
-    return ret;
-  }
-
-  @Override
-  public void submit(Job job) {
-    job.setStatus(Status.PENDING);
-
-    synchronized (queue) {
-      queue.add(job);
-      queue.notify();
-    }
-  }
-
-  public void setMaxConcurrency(int maxConcurrency) {
-    this.maxConcurrency = maxConcurrency;
-    synchronized (queue) {
-      queue.notify();
-    }
-  }
-
-  /**
-   * The role of this class is to get status info from the remote process as the job
-   * moves from PENDING to RUNNING status.
-   */
-  private class JobStatusPoller extends Thread {
-    private long initialPeriodMsec;
-    private long initialPeriodCheckIntervalMsec;
-    private long checkIntervalMsec;
-    private boolean terminate;
-    private JobListener listener;
-    private Job job;
-    Status lastStatus;
-
-    public JobStatusPoller(long initialPeriodMsec,
-        long initialPeriodCheckIntervalMsec, long checkIntervalMsec, Job job,
-        JobListener listener) {
-      this.initialPeriodMsec = initialPeriodMsec;
-      this.initialPeriodCheckIntervalMsec = initialPeriodCheckIntervalMsec;
-      this.checkIntervalMsec = checkIntervalMsec;
-      this.job = job;
-      this.listener = listener;
-      this.terminate = false;
-    }
-
-    @Override
-    public void run() {
-      long started = System.currentTimeMillis();
-      while (terminate == false) {
-        long current = System.currentTimeMillis();
-        long interval;
-        if (current - started < initialPeriodMsec) {
-          interval = initialPeriodCheckIntervalMsec;
-        } else {
-          interval = checkIntervalMsec;
-        }
-
-        synchronized (this) {
-          try {
-            this.wait(interval);
-          } catch (InterruptedException e) {
-          }
-        }
-
-
-        Status newStatus = getStatus();
-        if (newStatus == null) { // unknown
-          continue;
-        }
-
-        if (newStatus != Status.READY && newStatus != Status.PENDING) {
-          // we don't need more
-          continue;
-        }
-      }
-    }
-
-    public void shutdown() {
-      terminate = true;
-      synchronized (this) {
-        this.notify();
-      }
-    }
-
-
-    private Status getLastStatus() {
-      if (terminate == true) {
-        if (lastStatus != Status.FINISHED &&
-            lastStatus != Status.ERROR &&
-            lastStatus != Status.ABORT) {
-          return Status.FINISHED;
-        } else {
-          return (lastStatus == null) ? Status.FINISHED : lastStatus;
-        }
-      } else {
-        return (lastStatus == null) ? Status.FINISHED : lastStatus;
-      }
-    }
-
-    public synchronized Job.Status getStatus() {
-      if (interpreterProcess.referenceCount() <= 0) {
-        return getLastStatus();
-      }
-
-      Client client;
-      try {
-        client = interpreterProcess.getClient();
-      } catch (Exception e) {
-        logger.error("Can't get status information", e);
-        lastStatus = Status.ERROR;
-        return Status.ERROR;
-      }
-
-      try {
-        String statusStr = client.getStatus(job.getId());
-        if ("Unknown".equals(statusStr)) {
-          // not found this job in the remote schedulers.
-          // maybe not submitted, maybe already finished
-          Status status = getLastStatus();
-          listener.afterStatusChange(job, null, status);
-          return status;
-        }
-        Status status = Status.valueOf(statusStr);
-        lastStatus = status;
-        listener.afterStatusChange(job, null, status);
-        return status;
-      } catch (TException e) {
-        logger.error("Can't get status information", e);
-        lastStatus = Status.ERROR;
-        return Status.ERROR;
-      } catch (Exception e) {
-        logger.error("Unknown status", e);
-        lastStatus = Status.ERROR;
-        return Status.ERROR;
-      } finally {
-        interpreterProcess.releaseClient(client);
-      }
-    }
-  }
-
-  private class JobRunner implements Runnable, JobListener {
-    private Scheduler scheduler;
-    private Job job;
-    private boolean jobExecuted;
-    boolean jobSubmittedRemotely;
-
-    public JobRunner(Scheduler scheduler, Job job) {
-      this.scheduler = scheduler;
-      this.job = job;
-      jobExecuted = false;
-      jobSubmittedRemotely = false;
-    }
-
-    public boolean isJobSubmittedInRemote() {
-      return jobSubmittedRemotely;
-    }
-
-    @Override
-    public void run() {
-      if (job.isAborted()) {
-        job.setStatus(Status.ABORT);
-        job.aborted = false;
-
-        synchronized (queue) {
-          running.remove(job);
-          queue.notify();
-        }
-
-        return;
-      }
-
-      JobStatusPoller jobStatusPoller = new JobStatusPoller(1500, 100, 500,
-          job, this);
-      jobStatusPoller.start();
-
-      if (listener != null) {
-        listener.jobStarted(scheduler, job);
-      }
-      job.run();
-      jobExecuted = true;
-      jobSubmittedRemotely = true;
-
-      jobStatusPoller.shutdown();
-      try {
-        jobStatusPoller.join();
-      } catch (InterruptedException e) {
-        logger.error("JobStatusPoller interrupted", e);
-      }
-
-      job.setStatus(jobStatusPoller.getStatus());
-      if (listener != null) {
-        listener.jobFinished(scheduler, job);
-      }
-
-      // reset aborted flag to allow retry
-      job.aborted = false;
-
-      synchronized (queue) {
-        running.remove(job);
-        queue.notify();
-      }
-    }
-
-    @Override
-    public void onProgressUpdate(Job job, int progress) {
-    }
-
-    @Override
-    public void beforeStatusChange(Job job, Status before, Status after) {
-    }
-
-    @Override
-    public void afterStatusChange(Job job, Status before, Status after) {
-      if (after == null) { // unknown: maybe not yet submitted remotely, maybe already finished.
-        if (jobExecuted) {
-          jobSubmittedRemotely = true;
-          if (job.isAborted()) {
-            job.setStatus(Status.ABORT);
-          } else if (job.getException() != null) {
-            job.setStatus(Status.ERROR);
-          } else {
-            job.setStatus(Status.FINISHED);
-          }
-        }
-        return;
-      }
-
-
-      // Update remoteStatus
-      if (jobExecuted == false) {
-        if (after == Status.FINISHED || after == Status.ABORT
-            || after == Status.ERROR) {
-          // it can be status of last run.
-          // so not updating the remoteStatus
-          return;
-        } else if (after == Status.RUNNING) {
-          jobSubmittedRemotely = true;
-        }
-      } else {
-        jobSubmittedRemotely = true;
-      }
-
-      // status polled by status poller
-      if (job.getStatus() != after) {
-        job.setStatus(after);
-      }
-    }
-  }
-
-  @Override
-  public void stop() {
-    terminate = true;
-    synchronized (queue) {
-      queue.notify();
-    }
-
-  }
-
-}
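
RemoteScheduler delegates execution to a remote interpreter process and reconciles the locally tracked Job.Status with the string that Client.getStatus() returns; "Unknown" means the remote side has no record of the job yet (or any more). A standalone, illustrative rewrite of that mapping, not part of the class's API (package assumed as above):

    import org.apache.zeppelin.scheduler.Job.Status;

    public class RemoteStatusMappingSketch {
      // Mirrors the fallback in RemoteScheduler.JobStatusPoller: "Unknown" falls back to the
      // last observed status, or FINISHED when nothing has been observed yet.
      static Status map(String remoteStatus, Status lastStatus) {
        if ("Unknown".equals(remoteStatus)) {
          return lastStatus == null ? Status.FINISHED : lastStatus;
        }
        return Status.valueOf(remoteStatus);  // e.g. "RUNNING" -> Status.RUNNING
      }

      public static void main(String[] args) {
        System.out.println(map("RUNNING", Status.PENDING));  // RUNNING
        System.out.println(map("Unknown", null));            // FINISHED
      }
    }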

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/Scheduler.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/Scheduler.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/Scheduler.java
deleted file mode 100644
index e772c38..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/Scheduler.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import java.util.Collection;
-
-/**
- * TODO(moon) : add description.
- * 
- * @author Leemoonsoo
- *
- */
-public interface Scheduler extends Runnable {
-  public String getName();
-
-  public Collection<Job> getJobsWaiting();
-
-  public Collection<Job> getJobsRunning();
-
-  public void submit(Job job);
-
-  public void stop();
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/SchedulerFactory.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/SchedulerFactory.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/SchedulerFactory.java
deleted file mode 100644
index 115e2b1..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/SchedulerFactory.java
+++ /dev/null
@@ -1,129 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-import java.util.Collection;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.nflabs.zeppelin.interpreter.remote.RemoteInterpreterProcess;
-
-/**
- * TODO(moon) : add description.
- *
- * @author Leemoonsoo
- *
- */
-public class SchedulerFactory implements SchedulerListener {
-  private final Logger logger = LoggerFactory.getLogger(SchedulerFactory.class);
-  ScheduledExecutorService executor;
-  Map<String, Scheduler> schedulers = new LinkedHashMap<String, Scheduler>();
-
-  private static SchedulerFactory singleton;
-  private static Long singletonLock = new Long(0);
-
-  public static SchedulerFactory singleton() {
-    if (singleton == null) {
-      synchronized (singletonLock) {
-        if (singleton == null) {
-          try {
-            singleton = new SchedulerFactory();
-          } catch (Exception e) {
-            e.printStackTrace();
-          }
-        }
-      }
-    }
-    return singleton;
-  }
-
-  public SchedulerFactory() throws Exception {
-    executor = Executors.newScheduledThreadPool(100);
-  }
-
-  public void destroy() {
-    executor.shutdown();
-  }
-
-  public Scheduler createOrGetFIFOScheduler(String name) {
-    synchronized (schedulers) {
-      if (schedulers.containsKey(name) == false) {
-        Scheduler s = new FIFOScheduler(name, executor, this);
-        schedulers.put(name, s);
-        executor.execute(s);
-      }
-      return schedulers.get(name);
-    }
-  }
-
-  public Scheduler createOrGetParallelScheduler(String name, int maxConcurrency) {
-    synchronized (schedulers) {
-      if (schedulers.containsKey(name) == false) {
-        Scheduler s = new ParallelScheduler(name, executor, this, maxConcurrency);
-        schedulers.put(name, s);
-        executor.execute(s);
-      }
-      return schedulers.get(name);
-    }
-  }
-
-  public Scheduler createOrGetRemoteScheduler(
-      String name,
-      RemoteInterpreterProcess interpreterProcess,
-      int maxConcurrency) {
-
-    synchronized (schedulers) {
-      if (schedulers.containsKey(name) == false) {
-        Scheduler s = new RemoteScheduler(
-            name,
-            executor,
-            interpreterProcess,
-            this,
-            maxConcurrency);
-        schedulers.put(name, s);
-        executor.execute(s);
-      }
-      return schedulers.get(name);
-    }
-  }
-
-  public Scheduler removeScheduler(String name) {
-    synchronized (schedulers) {
-      Scheduler s = schedulers.remove(name);
-      if (s != null) {
-        s.stop();
-      }
-    }
-    return null;
-  }
-
-  public Collection<Scheduler> listScheduler(String name) {
-    List<Scheduler> s = new LinkedList<Scheduler>();
-    synchronized (schedulers) {
-      for (Scheduler ss : schedulers.values()) {
-        s.add(ss);
-      }
-    }
-    return s;
-  }
-
-  @Override
-  public void jobStarted(Scheduler scheduler, Job job) {
-    logger.info("Job " + job.getJobName() + " started by scheduler " + scheduler.getName());
-
-  }
-
-  @Override
-  public void jobFinished(Scheduler scheduler, Job job) {
-    logger.info("Job " + job.getJobName() + " finished by scheduler " + scheduler.getName());
-
-  }
-
-
-
-}
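
SchedulerFactory keeps one scheduler per name on a shared executor and registers itself as the SchedulerListener that logs job start/finish. A usage sketch, with placeholder scheduler names and the package assumed as above:

    import org.apache.zeppelin.scheduler.Scheduler;
    import org.apache.zeppelin.scheduler.SchedulerFactory;

    public class SchedulerFactorySketch {
      public static void main(String[] args) throws Exception {
        SchedulerFactory factory = SchedulerFactory.singleton();

        // schedulers are created lazily, keyed by name, and started on the shared executor
        Scheduler fifo = factory.createOrGetFIFOScheduler("note_fifo");
        Scheduler parallel = factory.createOrGetParallelScheduler("note_parallel", 10);

        // asking again for the same name returns the same running instance
        System.out.println(fifo == factory.createOrGetFIFOScheduler("note_fifo"));  // true

        factory.removeScheduler("note_parallel");  // stops and forgets that scheduler
        factory.destroy();                         // shuts down the shared executor
      }
    }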

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/SchedulerListener.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/SchedulerListener.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/SchedulerListener.java
deleted file mode 100644
index d551679..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/scheduler/SchedulerListener.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package com.nflabs.zeppelin.scheduler;
-
-/**
- * TODO(moon) : add description.
- * 
- * @author Leemoonsoo
- *
- */
-public interface SchedulerListener {
-  public void jobStarted(Scheduler scheduler, Job job);
-
-  public void jobFinished(Scheduler scheduler, Job job);
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/GUI.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/GUI.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/GUI.java
new file mode 100644
index 0000000..8ae7630
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/GUI.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.display;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.zeppelin.display.Input.ParamOption;
+
+/**
+ * Settings of a form.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class GUI implements Serializable {
+
+  Map<String, Object> params = new HashMap<String, Object>(); // form parameters from client
+  Map<String, Input> forms = new TreeMap<String, Input>(); // form configuration
+
+  public GUI() {
+
+  }
+
+  public void setParams(Map<String, Object> values) {
+    this.params = values;
+  }
+
+  public Map<String, Object> getParams() {
+    return params;
+  }
+
+  public Map<String, Input> getForms() {
+    return forms;
+  }
+
+  public void setForms(Map<String, Input> forms) {
+    this.forms = forms;
+  }
+
+  public Object input(String id, Object defaultValue) {
+    // first find values from client and then use default
+    Object value = params.get(id);
+    if (value == null) {
+      value = defaultValue;
+    }
+
+    forms.put(id, new Input(id, defaultValue));
+    return value;
+  }
+
+  public Object input(String id) {
+    return input(id, "");
+  }
+
+  public Object select(String id, Object defaultValue, ParamOption[] options) {
+    Object value = params.get(id);
+    if (value == null) {
+      value = defaultValue;
+    }
+    forms.put(id, new Input(id, defaultValue, options));
+    return value;
+  }
+
+  public void clear() {
+    this.forms = new TreeMap<String, Input>();
+  }
+}
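A minimal usage sketch of the form API above (illustrative only, not part of the patch; the class name GuiFormSketch and the form ids are made up). input()/select() return the client-supplied value when one exists, fall back to the default otherwise, and register the form definition as a side effect. In practice the GUI instance comes from InterpreterContext.getGui() rather than being constructed directly.

import org.apache.zeppelin.display.GUI;
import org.apache.zeppelin.display.Input.ParamOption;

public class GuiFormSketch {
  public static void main(String[] args) {
    GUI gui = new GUI();

    // No client params yet, so the defaults are returned and the
    // form configuration is recorded in gui.getForms().
    Object maxAge = gui.input("maxAge", "30");

    ParamOption[] options = new ParamOption[] {
        new ParamOption("m", "male"),
        new ParamOption("f", "female")
    };
    Object gender = gui.select("gender", "m", options);

    System.out.println(maxAge + " " + gender);    // 30 m
    System.out.println(gui.getForms().keySet());  // [gender, maxAge]
  }
}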

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/Input.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/Input.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/Input.java
new file mode 100644
index 0000000..2f7858c
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/display/Input.java
@@ -0,0 +1,476 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.display;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Input type.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class Input implements Serializable {
+  /**
+   * Parameter option.
+   *
+   * @author Leemoonsoo
+   *
+   */
+  public static class ParamOption {
+    Object value;
+    String displayName;
+
+    public ParamOption(Object value, String displayName) {
+      super();
+      this.value = value;
+      this.displayName = displayName;
+    }
+
+    public Object getValue() {
+      return value;
+    }
+
+    public void setValue(Object value) {
+      this.value = value;
+    }
+
+    public String getDisplayName() {
+      return displayName;
+    }
+
+    public void setDisplayName(String displayName) {
+      this.displayName = displayName;
+    }
+
+  }
+
+  String name;
+  String displayName;
+  String type;
+  Object defaultValue;
+  ParamOption[] options;
+  boolean hidden;
+
+  public Input(String name, Object defaultValue) {
+    this.name = name;
+    this.displayName = name;
+    this.defaultValue = defaultValue;
+  }
+
+  public Input(String name, Object defaultValue, ParamOption[] options) {
+    this.name = name;
+    this.displayName = name;
+    this.defaultValue = defaultValue;
+    this.options = options;
+  }
+
+
+  public Input(String name, String displayName, String type, Object defaultValue,
+      ParamOption[] options, boolean hidden) {
+    super();
+    this.name = name;
+    this.displayName = displayName;
+    this.type = type;
+    this.defaultValue = defaultValue;
+    this.options = options;
+    this.hidden = hidden;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    return name.equals(((Input) o).getName());
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getDisplayName() {
+    return displayName;
+  }
+
+  public void setDisplayName(String displayName) {
+    this.displayName = displayName;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public Object getDefaultValue() {
+    return defaultValue;
+  }
+
+  public void setDefaultValue(Object defaultValue) {
+    this.defaultValue = defaultValue;
+  }
+
+  public ParamOption[] getOptions() {
+    return options;
+  }
+
+  public void setOptions(ParamOption[] options) {
+    this.options = options;
+  }
+
+  public boolean isHidden() {
+    return hidden;
+  }
+
+
+  private static String[] getNameAndDisplayName(String str) {
+    Pattern p = Pattern.compile("([^(]*)\\s*[(]([^)]*)[)]");
+    Matcher m = p.matcher(str.trim());
+    if (m == null || m.find() == false) {
+      return null;
+    }
+    String[] ret = new String[2];
+    ret[0] = m.group(1);
+    ret[1] = m.group(2);
+    return ret;
+  }
+
+  private static String[] getType(String str) {
+    Pattern p = Pattern.compile("([^:]*)\\s*:\\s*(.*)");
+    Matcher m = p.matcher(str.trim());
+    if (m == null || m.find() == false) {
+      return null;
+    }
+    String[] ret = new String[2];
+    ret[0] = m.group(1).trim();
+    ret[1] = m.group(2).trim();
+    return ret;
+  }
+
+  public static Map<String, Input> extractSimpleQueryParam(String script) {
+    Map<String, Input> params = new HashMap<String, Input>();
+    if (script == null) {
+      return params;
+    }
+    String replaced = script;
+
+    Pattern pattern = Pattern.compile("([_])?[$][{]([^=}]*([=][^}]*)?)[}]");
+
+    Matcher match = pattern.matcher(replaced);
+    while (match.find()) {
+      String hiddenPart = match.group(1);
+      boolean hidden = false;
+      if ("_".equals(hiddenPart)) {
+        hidden = true;
+      }
+      String m = match.group(2);
+
+      String namePart;
+      String valuePart;
+
+      int p = m.indexOf('=');
+      if (p > 0) {
+        namePart = m.substring(0, p);
+        valuePart = m.substring(p + 1);
+      } else {
+        namePart = m;
+        valuePart = null;
+      }
+
+
+      String varName;
+      String displayName = null;
+      String type = null;
+      String defaultValue = "";
+      ParamOption[] paramOptions = null;
+
+      // get var name type
+      String varNamePart;
+      String[] typeArray = getType(namePart);
+      if (typeArray != null) {
+        type = typeArray[0];
+        varNamePart = typeArray[1];
+      } else {
+        varNamePart = namePart;
+      }
+
+      // get var name and display name
+      String[] varNameArray = getNameAndDisplayName(varNamePart);
+      if (varNameArray != null) {
+        varName = varNameArray[0];
+        displayName = varNameArray[1];
+      } else {
+        varName = varNamePart.trim();
+      }
+
+      // get defaultValue
+      if (valuePart != null) {
+        // find default value
+        int optionP = valuePart.indexOf(",");
+        if (optionP > 0) { // option available
+          defaultValue = valuePart.substring(0, optionP);
+          String optionPart = valuePart.substring(optionP + 1);
+          String[] options = Input.splitPipe(optionPart);
+
+          paramOptions = new ParamOption[options.length];
+
+          for (int i = 0; i < options.length; i++) {
+
+            String[] optNameArray = getNameAndDisplayName(options[i]);
+            if (optNameArray != null) {
+              paramOptions[i] = new ParamOption(optNameArray[0], optNameArray[1]);
+            } else {
+              paramOptions[i] = new ParamOption(options[i], null);
+            }
+          }
+
+
+        } else { // no option
+          defaultValue = valuePart;
+        }
+
+      }
+
+      Input param = new Input(varName, displayName, type, defaultValue, paramOptions, hidden);
+      params.put(varName, param);
+    }
+
+    params.remove("pql");
+    return params;
+  }
+
+  public static String getSimpleQuery(Map<String, Object> params, String script) {
+    String replaced = script;
+
+    for (String key : params.keySet()) {
+      Object value = params.get(key);
+      replaced =
+          replaced.replaceAll("[_]?[$][{]([^:]*[:])?" + key + "([(][^)]*[)])?(=[^}]*)?[}]",
+                              value.toString());
+    }
+
+    Pattern pattern = Pattern.compile("[$][{]([^=}]*[=][^}]*)[}]");
+    while (true) {
+      Matcher match = pattern.matcher(replaced);
+      if (match != null && match.find()) {
+        String m = match.group(1);
+        int p = m.indexOf('=');
+        String replacement = m.substring(p + 1);
+        int optionP = replacement.indexOf(",");
+        if (optionP > 0) {
+          replacement = replacement.substring(0, optionP);
+        }
+        replaced =
+            replaced.replaceFirst("[_]?[$][{]"
+                + m.replaceAll("[(]", ".").replaceAll("[)]", ".").replaceAll("[|]", ".") + "[}]",
+                replacement);
+      } else {
+        break;
+      }
+    }
+
+    replaced = replaced.replaceAll("[_]?[$][{]([^=}]*)[}]", "");
+    return replaced;
+  }
+
+
+  public static String[] split(String str) {
+    return str.split(";(?=([^\"']*\"[^\"']*\")*[^\"']*$)");
+
+  }
+
+  /*
+   * public static String [] splitPipe(String str){ //return
+   * str.split("\\|(?=([^\"']*\"[^\"']*\")*[^\"']*$)"); return
+   * str.split("\\|(?=([^\"']*\"[^\"']*\")*[^\"']*$)"); }
+   */
+
+
+  public static String[] splitPipe(String str) {
+    return split(str, '|');
+  }
+
+  public static String[] split(String str, char split) {
+    return split(str, new String[] {String.valueOf(split)}, false);
+  }
+
+  public static String[] split(String str, String[] splitters, boolean includeSplitter) {
+    String escapeSeq = "\"',;${}";
+    char escapeChar = '\\';
+
+    String[] blockStart = new String[] {"\"", "'", "${", "N_(", "N_<"};
+    String[] blockEnd = new String[] {"\"", "'", "}", "N_)", "N_>"};
+
+    return split(str, escapeSeq, escapeChar, blockStart, blockEnd, splitters, includeSplitter);
+
+  }
+
+  public static String[] split(String str, String escapeSeq, char escapeChar, String[] blockStart,
+      String[] blockEnd, String[] splitters, boolean includeSplitter) {
+
+    List<String> splits = new ArrayList<String>();
+
+    String curString = "";
+
+    boolean escape = false; // true when escape char is found
+    int lastEscapeOffset = -1;
+    int blockStartPos = -1;
+    List<Integer> blockStack = new LinkedList<Integer>();
+
+    for (int i = 0; i < str.length(); i++) {
+      char c = str.charAt(i);
+
+      // escape char detected
+      if (c == escapeChar && escape == false) {
+        escape = true;
+        continue;
+      }
+
+      // escaped char comes
+      if (escape == true) {
+        if (escapeSeq.indexOf(c) < 0) {
+          curString += escapeChar;
+        }
+        curString += c;
+        escape = false;
+        lastEscapeOffset = curString.length();
+        continue;
+      }
+
+      if (blockStack.size() > 0) { // inside of block
+        curString += c;
+        // check multichar block
+        boolean multicharBlockDetected = false;
+        for (int b = 0; b < blockStart.length; b++) {
+          if (blockStartPos >= 0
+              && getBlockStr(blockStart[b]).compareTo(str.substring(blockStartPos, i)) == 0) {
+            blockStack.remove(0);
+            blockStack.add(0, b);
+            multicharBlockDetected = true;
+            break;
+          }
+        }
+
+        if (multicharBlockDetected == true) {
+          continue;
+        }
+
+        // check if current block is nestable
+        if (isNestedBlock(blockStart[blockStack.get(0)]) == true) {
+          // try to find nested block start
+
+          if (curString.substring(lastEscapeOffset + 1).endsWith(
+              getBlockStr(blockStart[blockStack.get(0)])) == true) {
+            blockStack.add(0, blockStack.get(0)); // block is started
+            blockStartPos = i;
+            continue;
+          }
+        }
+
+        // check if block is finishing
+        if (curString.substring(lastEscapeOffset + 1).endsWith(
+            getBlockStr(blockEnd[blockStack.get(0)]))) {
+          // the block closer is one of the splitters (and not nested block)
+          if (isNestedBlock(blockEnd[blockStack.get(0)]) == false) {
+            for (String splitter : splitters) {
+              if (splitter.compareTo(getBlockStr(blockEnd[blockStack.get(0)])) == 0) {
+                splits.add(curString);
+                if (includeSplitter == true) {
+                  splits.add(splitter);
+                }
+                curString = "";
+                lastEscapeOffset = -1;
+
+                break;
+              }
+            }
+          }
+          blockStartPos = -1;
+          blockStack.remove(0);
+          continue;
+        }
+
+      } else { // not in the block
+        boolean splitted = false;
+        for (String splitter : splitters) {
+          // forward check for splitter
+          int currentLength = i + splitter.length();
+          if (splitter.compareTo(str.substring(i, Math.min(currentLength, str.length()))) == 0) {
+            splits.add(curString);
+            if (includeSplitter == true) {
+              splits.add(splitter);
+            }
+            curString = "";
+            lastEscapeOffset = -1;
+            i += splitter.length() - 1;
+            splitted = true;
+            break;
+          }
+        }
+        if (splitted == true) {
+          continue;
+        }
+
+        // add char to current string
+        curString += c;
+
+        // check if block is started
+        for (int b = 0; b < blockStart.length; b++) {
+          if (curString.substring(lastEscapeOffset + 1)
+                       .endsWith(getBlockStr(blockStart[b])) == true) {
+            blockStack.add(0, b); // block is started
+            blockStartPos = i;
+            break;
+          }
+        }
+      }
+    }
+    if (curString.length() > 0) {
+      splits.add(curString.trim());
+    }
+    return splits.toArray(new String[] {});
+
+  }
+
+  private static String getBlockStr(String blockDef) {
+    if (blockDef.startsWith("N_")) {
+      return blockDef.substring("N_".length());
+    } else {
+      return blockDef;
+    }
+  }
+
+  private static boolean isNestedBlock(String blockDef) {
+    if (blockDef.startsWith("N_")) {
+      return true;
+    } else {
+      return false;
+    }
+  }
+}
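A rough sketch of how the ${...} dynamic-form syntax handled above is parsed and substituted (illustrative only; the script, values and class name are made up). extractSimpleQueryParam() pulls form definitions out of the script, and getSimpleQuery() substitutes user-supplied values back in.

import java.util.HashMap;
import java.util.Map;

import org.apache.zeppelin.display.Input;

public class DynamicFormSketch {
  public static void main(String[] args) {
    String script = "select * from users where age > ${maxAge=30} "
        + "and gender = ${gender=m,m|f}";

    // Parse form definitions out of the ${...} patterns.
    Map<String, Input> forms = Input.extractSimpleQueryParam(script);
    System.out.println(forms.keySet());                        // maxAge, gender (order not guaranteed)
    System.out.println(forms.get("maxAge").getDefaultValue()); // 30

    // Substitute user-supplied values back into the script.
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("maxAge", "40");
    params.put("gender", "f");
    System.out.println(Input.getSimpleQuery(params, script));
    // select * from users where age > 40 and gender = f
  }
}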

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/ClassloaderInterpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/ClassloaderInterpreter.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/ClassloaderInterpreter.java
new file mode 100644
index 0000000..d3d6c1c
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/ClassloaderInterpreter.java
@@ -0,0 +1,278 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.net.URL;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.zeppelin.scheduler.Scheduler;
+
+/**
+ * Interpreter wrapper that runs every call on the wrapped interpreter
+ * with the given classloader set as the thread context classloader.
+ *
+ */
+public class ClassloaderInterpreter
+    extends Interpreter
+    implements WrappedInterpreter {
+
+  private ClassLoader cl;
+  private Interpreter intp;
+
+  public ClassloaderInterpreter(Interpreter intp, ClassLoader cl) {
+    super(new Properties());
+    this.cl = cl;
+    this.intp = intp;
+  }
+
+  @Override
+  public Interpreter getInnerInterpreter() {
+    return intp;
+  }
+
+  public ClassLoader getClassloader() {
+    return cl;
+  }
+
+  @Override
+  public InterpreterResult interpret(String st, InterpreterContext context) {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.interpret(st, context);
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+
+  @Override
+  public void open() {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      intp.open();
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public void close() {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      intp.close();
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      intp.cancel(context);
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public FormType getFormType() {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.getFormType();
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.getProgress(context);
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.getScheduler();
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.completion(buf, cursor);
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+
+  @Override
+  public String getClassName() {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.getClassName();
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      intp.setInterpreterGroup(interpreterGroup);
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public InterpreterGroup getInterpreterGroup() {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.getInterpreterGroup();
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public void setClassloaderUrls(URL [] urls) {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      intp.setClassloaderUrls(urls);
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public URL [] getClassloaderUrls() {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.getClassloaderUrls();
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public void setProperty(Properties property) {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      intp.setProperty(property);
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public Properties getProperty() {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.getProperty();
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+
+  @Override
+  public String getProperty(String key) {
+    ClassLoader oldcl = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(cl);
+    try {
+      return intp.getProperty(key);
+    } catch (Exception e) {
+      throw new InterpreterException(e);
+    } finally {
+      cl = Thread.currentThread().getContextClassLoader();
+      Thread.currentThread().setContextClassLoader(oldcl);
+    }
+  }
+}
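A hedged sketch of how this wrapper might be used: build a URLClassLoader over an interpreter's own jar directory and wrap the inner interpreter so every delegated call runs with that loader as the thread context classloader. The helper name and directory layout are assumptions, not part of the patch.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

import org.apache.zeppelin.interpreter.ClassloaderInterpreter;
import org.apache.zeppelin.interpreter.Interpreter;

public class ClassloaderWrapSketch {
  public static Interpreter wrap(Interpreter inner, File interpreterDir) throws Exception {
    // Build a classloader over the interpreter's jar directory.
    File[] jars = interpreterDir.listFiles();
    URL[] urls = new URL[jars == null ? 0 : jars.length];
    for (int i = 0; i < urls.length; i++) {
      urls[i] = jars[i].toURI().toURL();
    }
    ClassLoader cl = new URLClassLoader(urls, Thread.currentThread().getContextClassLoader());

    // Every call on the returned interpreter now runs with 'cl' as the
    // thread context classloader (see ClassloaderInterpreter above).
    return new ClassloaderInterpreter(inner, cl);
  }
}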

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java
new file mode 100644
index 0000000..58dcb64
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java
@@ -0,0 +1,283 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+
+import java.net.URL;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Base class for interpreters.
+ * If you want to implement a new Zeppelin interpreter, extend this class.
+ *
+ * Please see,
+ * http://zeppelin.incubator.apache.org/docs/development/writingzeppelininterpreter.html
+ *
+ * open(), close() and interpret() are the three most important methods you need to implement.
+ * cancel(), getProgress() and completion() are good to have.
+ * getFormType() and getScheduler() determine Zeppelin's behavior.
+ *
+ */
+public abstract class Interpreter {
+
+  /**
+   * Opens the interpreter. You may want to put your initialization routine here.
+   * open() is called only once
+   */
+  public abstract void open();
+
+  /**
+   * Closes the interpreter. You may want to free your resources here.
+   * close() is called only once
+   */
+  public abstract void close();
+
+  /**
+   * Runs code and returns the result synchronously.
+   *
+   * @param st statements to run
+   * @param context
+   * @return
+   */
+  public abstract InterpreterResult interpret(String st, InterpreterContext context);
+
+  /**
+   * Optionally implement the cancel routine to abort a running interpret() call.
+   *
+   * @param context
+   */
+  public abstract void cancel(InterpreterContext context);
+
+  /**
+   * Dynamic form handling.
+   * See http://zeppelin.incubator.apache.org/docs/dynamicform.html
+   *
+   * @return FormType.SIMPLE enables simple pattern replacement (e.g. Hello ${name=world}),
+   *         FormType.NATIVE handles forms through the API
+   */
+  public abstract FormType getFormType();
+
+  /**
+   * Gets the progress of a running interpret() call, as a percentage.
+   *
+   * @param context
+   * @return number between 0-100
+   */
+  public abstract int getProgress(InterpreterContext context);
+
+  /**
+   * Get completion list based on cursor position.
+   * Implementing this method enables auto-completion.
+   *
+   * @param buf statements
+   * @param cursor cursor position in statements
+   * @return list of possible completions. Return an empty list if there is nothing to return.
+   */
+  public abstract List<String> completion(String buf, int cursor);
+
+  /**
+   * An interpreter can implement its own scheduler by overriding this method.
+   * Two default schedulers are provided: FIFO and Parallel.
+   * If your interpret() can handle concurrent requests, use Parallel; otherwise use FIFO.
+   *
+   * You can get a default scheduler by using
+   * SchedulerFactory.singleton().createOrGetFIFOScheduler() or
+   * SchedulerFactory.singleton().createOrGetParallelScheduler().
+   *
+   * @return the scheduler instance.
+   *         This method can be called multiple times and has to return the same instance.
+   *         Must not return null.
+   */
+  public Scheduler getScheduler() {
+    return SchedulerFactory.singleton().createOrGetFIFOScheduler("interpreter_" + this.hashCode());
+  }
+
+  /**
+   * Called when interpreter is no longer used.
+   */
+  public void destroy() {
+    getScheduler().stop();
+  }
+
+
+
+
+
+  static Logger logger = LoggerFactory.getLogger(Interpreter.class);
+  private InterpreterGroup interpreterGroup;
+  private URL [] classloaderUrls;
+  protected Properties property;
+
+  public Interpreter(Properties property) {
+    this.property = property;
+  }
+
+  public void setProperty(Properties property) {
+    this.property = property;
+  }
+
+  public Properties getProperty() {
+    Properties p = new Properties();
+    p.putAll(property);
+
+    Map<String, InterpreterProperty> defaultProperties = Interpreter
+        .findRegisteredInterpreterByClassName(getClassName()).getProperties();
+    for (String k : defaultProperties.keySet()) {
+      if (!p.containsKey(k)) {
+        String value = defaultProperties.get(k).getDefaultValue();
+        if (value != null) {
+          p.put(k, defaultProperties.get(k).getDefaultValue());
+        }
+      }
+    }
+
+    return p;
+  }
+
+  public String getProperty(String key) {
+    if (property.containsKey(key)) {
+      return property.getProperty(key);
+    }
+
+    Map<String, InterpreterProperty> defaultProperties = Interpreter
+        .findRegisteredInterpreterByClassName(getClassName()).getProperties();
+    if (defaultProperties.containsKey(key)) {
+      return defaultProperties.get(key).getDefaultValue();
+    }
+
+    return null;
+  }
+
+
+  public String getClassName() {
+    return this.getClass().getName();
+  }
+
+  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
+    this.interpreterGroup = interpreterGroup;
+  }
+
+  public InterpreterGroup getInterpreterGroup() {
+    return this.interpreterGroup;
+  }
+
+  public URL[] getClassloaderUrls() {
+    return classloaderUrls;
+  }
+
+  public void setClassloaderUrls(URL[] classloaderUrls) {
+    this.classloaderUrls = classloaderUrls;
+  }
+
+
+  /**
+   * Type of dynamic form.
+   */
+  public static enum FormType {
+    NATIVE, SIMPLE, NONE
+  }
+
+  /**
+   * Represents a registered interpreter class.
+   */
+  public static class RegisteredInterpreter {
+    private String name;
+    private String group;
+    private String className;
+    private Map<String, InterpreterProperty> properties;
+    private String path;
+
+    public RegisteredInterpreter(String name, String group, String className,
+        Map<String, InterpreterProperty> properties) {
+      super();
+      this.name = name;
+      this.group = group;
+      this.className = className;
+      this.properties = properties;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public String getGroup() {
+      return group;
+    }
+
+    public String getClassName() {
+      return className;
+    }
+
+    public Map<String, InterpreterProperty> getProperties() {
+      return properties;
+    }
+
+    public void setPath(String path) {
+      this.path = path;
+    }
+
+    public String getPath() {
+      return path;
+    }
+
+  }
+
+  /**
+   * Type of Scheduling.
+   */
+  public static enum SchedulingMode {
+    FIFO, PARALLEL
+  }
+
+  public static Map<String, RegisteredInterpreter> registeredInterpreters = Collections
+      .synchronizedMap(new HashMap<String, RegisteredInterpreter>());
+
+  public static void register(String name, String className) {
+    register(name, name, className);
+  }
+
+  public static void register(String name, String group, String className) {
+    register(name, group, className, new HashMap<String, InterpreterProperty>());
+  }
+
+  public static void register(String name, String group, String className,
+      Map<String, InterpreterProperty> properties) {
+    registeredInterpreters.put(name, new RegisteredInterpreter(name, group, className, properties));
+  }
+
+  public static RegisteredInterpreter findRegisteredInterpreterByClassName(String className) {
+    for (RegisteredInterpreter ri : registeredInterpreters.values()) {
+      if (ri.getClassName().equals(className)) {
+        return ri;
+      }
+    }
+    return null;
+  }
+
+
+}
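For reference, a minimal implementation against the abstract class above might look like the following sketch (the EchoInterpreter name and the "echo" registration key are hypothetical; only the methods and register() call shown in the class above are used).

import java.util.Collections;
import java.util.List;
import java.util.Properties;

import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterResult;

public class EchoInterpreter extends Interpreter {

  static {
    // Register under a name so the framework can resolve it to this class.
    Interpreter.register("echo", EchoInterpreter.class.getName());
  }

  public EchoInterpreter(Properties property) {
    super(property);
  }

  @Override
  public void open() {}   // nothing to initialize

  @Override
  public void close() {}  // nothing to release

  @Override
  public InterpreterResult interpret(String st, InterpreterContext context) {
    // Echo the statement back as a successful text result.
    return new InterpreterResult(InterpreterResult.Code.SUCCESS, st);
  }

  @Override
  public void cancel(InterpreterContext context) {}

  @Override
  public FormType getFormType() {
    return FormType.SIMPLE;  // enable ${...} pattern replacement
  }

  @Override
  public int getProgress(InterpreterContext context) {
    return 0;
  }

  @Override
  public List<String> completion(String buf, int cursor) {
    return Collections.emptyList();
  }
}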

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterContext.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterContext.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterContext.java
new file mode 100644
index 0000000..2d70c8e
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterContext.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.util.Map;
+
+import org.apache.zeppelin.display.GUI;
+
+/**
+ * Interpreter context
+ */
+public class InterpreterContext {
+  private final String paragraphTitle;
+  private final String paragraphId;
+  private final String paragraphText;
+  private final Map<String, Object> config;
+  private GUI gui;
+
+
+  public InterpreterContext(String paragraphId,
+                            String paragraphTitle,
+                            String paragraphText,
+                            Map<String, Object> config,
+                            GUI gui
+                            ) {
+    this.paragraphId = paragraphId;
+    this.paragraphTitle = paragraphTitle;
+    this.paragraphText = paragraphText;
+    this.config = config;
+    this.gui = gui;
+  }
+
+  public String getParagraphId() {
+    return paragraphId;
+  }
+
+  public String getParagraphText() {
+    return paragraphText;
+  }
+
+  public String getParagraphTitle() {
+    return paragraphTitle;
+  }
+
+  public Map<String, Object> getConfig() {
+    return config;
+  }
+
+  public GUI getGui() {
+    return gui;
+  }
+
+}
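Since the context is a simple holder, a test or embedding host might construct one directly, as in this sketch (all argument values are placeholders, and the helper name is made up).

import java.util.HashMap;

import org.apache.zeppelin.display.GUI;
import org.apache.zeppelin.interpreter.InterpreterContext;

public class ContextSketch {
  public static InterpreterContext emptyContext(String paragraphId) {
    // Constructor argument order: id, title, text, config, gui.
    return new InterpreterContext(
        paragraphId,
        "title",
        "text",
        new HashMap<String, Object>(),
        new GUI());
  }
}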

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterException.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterException.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterException.java
new file mode 100644
index 0000000..30c1c0a
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterException.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+/**
+ * Runtime Exception for interpreters.
+ *
+ */
+public class InterpreterException extends RuntimeException {
+
+  public InterpreterException(Throwable e) {
+    super(e);
+  }
+
+  public InterpreterException(String m) {
+    super(m);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterGroup.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterGroup.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterGroup.java
new file mode 100644
index 0000000..834630a
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterGroup.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.util.LinkedList;
+import java.util.Properties;
+import java.util.Random;
+
+/**
+ * InterpreterGroup is a list of interpreters in the same group.
+ * It is also the unit of interpreter instantiation, restart, bind and unbind.
+ */
+public class InterpreterGroup extends LinkedList<Interpreter>{
+  String id;
+
+  private static String generateId() {
+    return "InterpreterGroup_" + System.currentTimeMillis() + "_"
+           + new Random().nextInt();
+  }
+
+  public String getId() {
+    synchronized (this) {
+      if (id == null) {
+        id = generateId();
+      }
+      return id;
+    }
+  }
+
+
+  public Properties getProperty() {
+    Properties p = new Properties();
+    for (Interpreter intp : this) {
+      p.putAll(intp.getProperty());
+    }
+    return p;
+  }
+
+  public void close() {
+    for (Interpreter intp : this) {
+      intp.close();
+    }
+  }
+
+  public void destroy() {
+    for (Interpreter intp : this) {
+      intp.destroy();
+    }
+  }
+}
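A small sketch of how a group might be assembled (the helper name is made up): the group is itself the list of member interpreters, and each member points back to it via setInterpreterGroup().

import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterGroup;

public class GroupSketch {
  public static InterpreterGroup groupOf(Interpreter... intps) {
    InterpreterGroup group = new InterpreterGroup();
    for (Interpreter intp : intps) {
      intp.setInterpreterGroup(group);
      group.add(intp);  // InterpreterGroup extends LinkedList<Interpreter>
    }
    return group;
  }
}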

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterProperty.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterProperty.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterProperty.java
new file mode 100644
index 0000000..cc13ace
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterProperty.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+/**
+ * Represents a property of an interpreter.
+ */
+public class InterpreterProperty {
+  String defaultValue;
+  String description;
+
+  public InterpreterProperty(String defaultValue,
+      String description) {
+    super();
+    this.defaultValue = defaultValue;
+    this.description = description;
+  }
+
+  public String getDefaultValue() {
+    return defaultValue;
+  }
+
+  public void setDefaultValue(String defaultValue) {
+    this.defaultValue = defaultValue;
+  }
+
+  public String getDescription() {
+    return description;
+  }
+
+  public void setDescription(String description) {
+    this.description = description;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterPropertyBuilder.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterPropertyBuilder.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterPropertyBuilder.java
new file mode 100644
index 0000000..f077b4e
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterPropertyBuilder.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * InterpreterPropertyBuilder
+ */
+public class InterpreterPropertyBuilder {
+  Map<String, InterpreterProperty> properties = new HashMap<String, InterpreterProperty>();
+
+  public InterpreterPropertyBuilder add(String name, String defaultValue, String description){
+    properties.put(name, new InterpreterProperty(defaultValue, description));
+    return this;
+  }
+
+  public Map<String, InterpreterProperty> build(){
+    return properties;
+  }
+}
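The builder is typically combined with Interpreter.register(), as in this sketch (the class name and property key are hypothetical). Default values added here are what getProperty() falls back to when the user has not set the key.

import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;

public class RegisterSketch {
  public static void registerEcho() {
    Interpreter.register(
        "echo",
        "echo",
        "org.example.EchoInterpreter",  // hypothetical interpreter class
        new InterpreterPropertyBuilder()
            .add("echo.prefix", ">", "prefix prepended to every result")
            .build());
  }
}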

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResult.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResult.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResult.java
new file mode 100644
index 0000000..0659a47
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterResult.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.io.Serializable;
+
+/**
+ * Interpreter result template.
+ *
+ * @author Leemoonsoo
+ *
+ */
+public class InterpreterResult implements Serializable {
+
+  /**
+   *  Type of result after code execution.
+   *
+   * @author Leemoonsoo
+   *
+   */
+  public static enum Code {
+    SUCCESS,
+    INCOMPLETE,
+    ERROR
+  }
+
+  /**
+   * Type of Data.
+   *
+   * @author Leemoonsoo
+   *
+   */
+  public static enum Type {
+    TEXT,
+    HTML,
+    TABLE,
+    IMG,
+    SVG,
+    NULL
+  }
+
+  Code code;
+  Type type;
+  String msg;
+
+  public InterpreterResult(Code code) {
+    this.code = code;
+    this.msg = null;
+    this.type = Type.TEXT;
+  }
+
+  public InterpreterResult(Code code, String msg) {
+    this.code = code;
+    this.msg = getData(msg);
+    this.type = getType(msg);
+  }
+
+  public InterpreterResult(Code code, Type type, String msg) {
+    this.code = code;
+    this.msg = msg;
+    this.type = type;
+  }
+
+  /**
+   * Strips a magic prefix such as %html or %text from the message.
+   *
+   * @param msg
+   * @return
+   */
+  private String getData(String msg) {
+    if (msg == null) {
+      return null;
+    }
+
+    Type[] types = Type.values();
+    for (Type t : types) {
+      String magic = "%" + t.name().toLowerCase();
+      if (msg.startsWith(magic + " ") || msg.startsWith(magic + "\n")) {
+        int magicLength = magic.length() + 1;
+        if (msg.length() > magicLength) {
+          return msg.substring(magicLength);
+        } else {
+          return "";
+        }
+      }
+    }
+
+    return msg;
+  }
+
+
+  private Type getType(String msg) {
+    if (msg == null) {
+      return Type.TEXT;
+    }
+    Type[] types = Type.values();
+    for (Type t : types) {
+      String magic = "%" + t.name().toLowerCase();
+      if (msg.startsWith(magic + " ") || msg.startsWith(magic + "\n")) {
+        return t;
+      }
+    }
+    return Type.TEXT;
+  }
+
+  public Code code() {
+    return code;
+  }
+
+  public String message() {
+    return msg;
+  }
+
+  public Type type() {
+    return type;
+  }
+
+  public InterpreterResult type(Type type) {
+    this.type = type;
+    return this;
+  }
+}
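A quick sketch of the magic-prefix behaviour implemented by getData()/getType() above (the class name is illustrative): a leading "%html " both sets the result type and is stripped from the message.

import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;

public class ResultSketch {
  public static void main(String[] args) {
    // The "%html " magic prefix sets the result type and is removed.
    InterpreterResult r = new InterpreterResult(Code.SUCCESS, "%html <b>done</b>");
    System.out.println(r.type());     // HTML
    System.out.println(r.message());  // <b>done</b>

    // Without a magic prefix the type defaults to TEXT.
    InterpreterResult t = new InterpreterResult(Code.SUCCESS, "done");
    System.out.println(t.type());     // TEXT
  }
}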

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterUtils.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterUtils.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterUtils.java
new file mode 100644
index 0000000..c3d3b9e
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterUtils.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.zeppelin.interpreter;
+
+import java.lang.reflect.InvocationTargetException;
+
+/**
+ * Interpreter utility functions
+ */
+public class InterpreterUtils {
+
+  public static String getMostRelevantMessage(Exception ex) {
+    if (ex instanceof InvocationTargetException) {
+      Throwable cause = ((InvocationTargetException) ex).getCause();
+      if (cause != null) {
+        return cause.getMessage();
+      }
+    }
+    return ex.getMessage();
+  }
+}
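A usage sketch (illustrative only; the messages are made up): when a reflective call fails, the wrapped cause's message is surfaced instead of the outer InvocationTargetException's.

import java.lang.reflect.InvocationTargetException;

import org.apache.zeppelin.interpreter.InterpreterUtils;

public class MessageSketch {
  public static void main(String[] args) {
    Exception wrapped = new InvocationTargetException(
        new IllegalStateException("session not started"), "reflective call failed");
    // Unwraps the InvocationTargetException and returns the cause's message.
    System.out.println(InterpreterUtils.getMostRelevantMessage(wrapped));  // session not started
  }
}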

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java
new file mode 100644
index 0000000..599a24a
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+import java.net.URL;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.zeppelin.scheduler.Scheduler;
+
+/**
+ * Interpreter wrapper for lazy initialization
+ */
+public class LazyOpenInterpreter
+    extends Interpreter
+    implements WrappedInterpreter {
+  private Interpreter intp;
+  boolean opened = false;
+
+  public LazyOpenInterpreter(Interpreter intp) {
+    super(new Properties());
+    this.intp = intp;
+  }
+
+  @Override
+  public Interpreter getInnerInterpreter() {
+    return intp;
+  }
+
+  @Override
+  public void setProperty(Properties property) {
+    intp.setProperty(property);
+  }
+
+  @Override
+  public Properties getProperty() {
+    return intp.getProperty();
+  }
+
+  @Override
+  public String getProperty(String key) {
+    return intp.getProperty(key);
+  }
+
+  @Override
+  public void open() {
+    if (opened == true) {
+      return;
+    }
+
+    synchronized (intp) {
+      if (opened == false) {
+        intp.open();
+        opened = true;
+      }
+    }
+  }
+
+  @Override
+  public void close() {
+    synchronized (intp) {
+      if (opened == true) {
+        intp.close();
+        opened = false;
+      }
+    }
+  }
+
+  public boolean isOpen() {
+    synchronized (intp) {
+      return opened;
+    }
+  }
+
+  @Override
+  public InterpreterResult interpret(String st, InterpreterContext context) {
+    open();
+    return intp.interpret(st, context);
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {
+    open();
+    intp.cancel(context);
+  }
+
+  @Override
+  public FormType getFormType() {
+    return intp.getFormType();
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    open();
+    return intp.getProgress(context);
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    return intp.getScheduler();
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    open();
+    return intp.completion(buf, cursor);
+  }
+
+  @Override
+  public String getClassName() {
+    return intp.getClassName();
+  }
+
+  @Override
+  public InterpreterGroup getInterpreterGroup() {
+    return intp.getInterpreterGroup();
+  }
+
+  @Override
+  public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
+    intp.setInterpreterGroup(interpreterGroup);
+  }
+
+  @Override
+  public URL [] getClassloaderUrls() {
+    return intp.getClassloaderUrls();
+  }
+
+  @Override
+  public void setClassloaderUrls(URL [] urls) {
+    intp.setClassloaderUrls(urls);
+  }
+}
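A sketch of the intended wrapping (the helper name is made up): the inner interpreter's open() is deferred until the first call that needs it, and is guarded so it runs at most once.

import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.LazyOpenInterpreter;

public class LazyOpenSketch {
  public static Interpreter lazy(Interpreter inner) {
    // open() on 'inner' is deferred until the first interpret()/getProgress()/
    // completion() call, and the 'opened' flag ensures it runs only once.
    return new LazyOpenInterpreter(inner);
  }
}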

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/WrappedInterpreter.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/WrappedInterpreter.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/WrappedInterpreter.java
new file mode 100644
index 0000000..a12a9aa
--- /dev/null
+++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/WrappedInterpreter.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter;
+
+/**
+ * Interface for interpreters that wrap another interpreter and expose the inner instance.
+ */
+public interface WrappedInterpreter {
+  public Interpreter getInnerInterpreter();
+}
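
Callers that need the concrete interpreter behind one or more wrappers (for example a
LazyOpenInterpreter) peel the layers off through this interface. Below is a minimal sketch of
that unwrapping loop, the same pattern MockInterpreterB.getInterpreterA() uses further down in
this commit (the helper class UnwrapSketch is made up for illustration):

  import org.apache.zeppelin.interpreter.Interpreter;
  import org.apache.zeppelin.interpreter.WrappedInterpreter;

  public class UnwrapSketch {
    // Follows getInnerInterpreter() until the interpreter is no longer a wrapper.
    public static Interpreter unwrap(Interpreter intp) {
      Interpreter p = intp;
      while (p instanceof WrappedInterpreter) {
        p = ((WrappedInterpreter) p).getInnerInterpreter();
      }
      return p;
    }
  }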


[06/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java
new file mode 100644
index 0000000..58299bb
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java
@@ -0,0 +1,446 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.thrift.transport.TTransportException;
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreter;
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
+import org.apache.zeppelin.interpreter.remote.mock.MockInterpreterA;
+import org.apache.zeppelin.interpreter.remote.mock.MockInterpreterB;
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.Job.Status;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class RemoteInterpreterTest {
+
+
+  private InterpreterGroup intpGroup;
+  private HashMap<String, String> env;
+
+  @Before
+  public void setUp() throws Exception {
+    intpGroup = new InterpreterGroup();
+    env = new HashMap<String, String>();
+    env.put("ZEPPELIN_CLASSPATH", new File("./target/test-classes").getAbsolutePath());
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    intpGroup.close();
+    intpGroup.destroy();
+  }
+
+  @Test
+  public void testRemoteInterpreterCall() throws TTransportException, IOException {
+    Properties p = new Properties();
+
+    RemoteInterpreter intpA = new RemoteInterpreter(
+        p,
+        MockInterpreterA.class.getName(),
+        new File("../bin/interpreter.sh").getAbsolutePath(),
+        "fake",
+        env
+        );
+
+    intpGroup.add(intpA);
+    intpA.setInterpreterGroup(intpGroup);
+
+    RemoteInterpreter intpB = new RemoteInterpreter(
+        p,
+        MockInterpreterB.class.getName(),
+        new File("../bin/interpreter.sh").getAbsolutePath(),
+        "fake",
+        env
+        );
+
+    intpGroup.add(intpB);
+    intpB.setInterpreterGroup(intpGroup);
+
+
+    RemoteInterpreterProcess process = intpA.getInterpreterProcess();
+    assertEquals(process, intpB.getInterpreterProcess());
+
+    assertFalse(process.isRunning());
+    assertEquals(0, process.getNumIdleClient());
+    assertEquals(0, process.referenceCount());
+
+    intpA.open();
+    assertTrue(process.isRunning());
+    assertEquals(1, process.getNumIdleClient());
+    assertEquals(1, process.referenceCount());
+
+    intpA.interpret("1",
+        new InterpreterContext(
+            "id",
+            "title",
+            "text",
+            new HashMap<String, Object>(),
+            new GUI()));
+
+    intpB.open();
+    assertEquals(2, process.referenceCount());
+
+    intpA.close();
+    assertEquals(1, process.referenceCount());
+    intpB.close();
+    assertEquals(0, process.referenceCount());
+
+    assertFalse(process.isRunning());
+
+  }
+
+  @Test
+  public void testRemoteSchedulerSharing() throws TTransportException, IOException {
+    Properties p = new Properties();
+
+    RemoteInterpreter intpA = new RemoteInterpreter(
+        p,
+        MockInterpreterA.class.getName(),
+        new File("../bin/interpreter.sh").getAbsolutePath(),
+        "fake",
+        env
+        );
+
+    intpGroup.add(intpA);
+    intpA.setInterpreterGroup(intpGroup);
+
+    RemoteInterpreter intpB = new RemoteInterpreter(
+        p,
+        MockInterpreterB.class.getName(),
+        new File("../bin/interpreter.sh").getAbsolutePath(),
+        "fake",
+        env
+        );
+
+    intpGroup.add(intpB);
+    intpB.setInterpreterGroup(intpGroup);
+
+    intpA.open();
+    intpB.open();
+
+    long start = System.currentTimeMillis();
+    InterpreterResult ret = intpA.interpret("500",
+        new InterpreterContext(
+            "id",
+            "title",
+            "text",
+            new HashMap<String, Object>(),
+            new GUI()));
+    assertEquals("500", ret.message());
+
+    ret = intpB.interpret("500",
+        new InterpreterContext(
+            "id",
+            "title",
+            "text",
+            new HashMap<String, Object>(),
+            new GUI()));
+    assertEquals("1000", ret.message());
+    long end = System.currentTimeMillis();
+    assertTrue(end - start >= 1000);
+
+
+    intpA.close();
+    intpB.close();
+
+    RemoteInterpreterProcess process = intpA.getInterpreterProcess();
+    assertFalse(process.isRunning());
+  }
+
+  @Test
+  public void testRemoteSchedulerSharingSubmit() throws TTransportException, IOException, InterruptedException {
+    Properties p = new Properties();
+
+    final RemoteInterpreter intpA = new RemoteInterpreter(
+        p,
+        MockInterpreterA.class.getName(),
+        new File("../bin/interpreter.sh").getAbsolutePath(),
+        "fake",
+        env
+        );
+
+    intpGroup.add(intpA);
+    intpA.setInterpreterGroup(intpGroup);
+
+    final RemoteInterpreter intpB = new RemoteInterpreter(
+        p,
+        MockInterpreterB.class.getName(),
+        new File("../bin/interpreter.sh").getAbsolutePath(),
+        "fake",
+        env
+        );
+
+    intpGroup.add(intpB);
+    intpB.setInterpreterGroup(intpGroup);
+
+    intpA.open();
+    intpB.open();
+
+    long start = System.currentTimeMillis();
+    Job jobA = new Job("jobA", null) {
+
+      @Override
+      public int progress() {
+        return 0;
+      }
+
+      @Override
+      public Map<String, Object> info() {
+        return null;
+      }
+
+      @Override
+      protected Object jobRun() throws Throwable {
+        return intpA.interpret("500",
+            new InterpreterContext(
+                "jobA",
+                "title",
+                "text",
+                new HashMap<String, Object>(),
+                new GUI()));
+      }
+
+      @Override
+      protected boolean jobAbort() {
+        return false;
+      }
+
+    };
+    intpA.getScheduler().submit(jobA);
+
+    Job jobB = new Job("jobB", null) {
+
+      @Override
+      public int progress() {
+        return 0;
+      }
+
+      @Override
+      public Map<String, Object> info() {
+        return null;
+      }
+
+      @Override
+      protected Object jobRun() throws Throwable {
+        return intpB.interpret("500",
+            new InterpreterContext(
+                "jobB",
+                "title",
+                "text",
+                new HashMap<String, Object>(),
+                new GUI()));
+      }
+
+      @Override
+      protected boolean jobAbort() {
+        return false;
+      }
+
+    };
+    intpB.getScheduler().submit(jobB);
+
+    // wait until both jobs have finished
+    while (jobA.getStatus() != Status.FINISHED ||
+           jobB.getStatus() != Status.FINISHED) {
+      Thread.sleep(100);
+    }
+
+    long end = System.currentTimeMillis();
+    assertTrue(end - start >= 1000);
+
+    assertEquals("1000", ((InterpreterResult) jobB.getReturn()).message());
+
+    intpA.close();
+    intpB.close();
+
+    RemoteInterpreterProcess process = intpA.getInterpreterProcess();
+    assertFalse(process.isRunning());
+  }
+
+  @Test
+  public void testRunOrderPreserved() throws InterruptedException {
+    Properties p = new Properties();
+
+    final RemoteInterpreter intpA = new RemoteInterpreter(
+        p,
+        MockInterpreterA.class.getName(),
+        new File("../bin/interpreter.sh").getAbsolutePath(),
+        "fake",
+        env
+        );
+
+    intpGroup.add(intpA);
+    intpA.setInterpreterGroup(intpGroup);
+
+    intpA.open();
+
+    int concurrency = 3;
+    final List<String> results = new LinkedList<String>();
+
+    Scheduler scheduler = intpA.getScheduler();
+    for (int i = 0; i < concurrency; i++) {
+      final String jobId = Integer.toString(i);
+      scheduler.submit(new Job(jobId, Integer.toString(i), null, 200) {
+
+        @Override
+        public int progress() {
+          return 0;
+        }
+
+        @Override
+        public Map<String, Object> info() {
+          return null;
+        }
+
+        @Override
+        protected Object jobRun() throws Throwable {
+          InterpreterResult ret = intpA.interpret(getJobName(), new InterpreterContext(
+              jobId,
+              "title",
+              "text",
+              new HashMap<String, Object>(),
+              new GUI()));
+
+          synchronized (results) {
+            results.add(ret.message());
+            results.notify();
+          }
+          return null;
+        }
+
+        @Override
+        protected boolean jobAbort() {
+          return false;
+        }
+
+      });
+    }
+
+    // wait until all jobs have finished
+    synchronized (results) {
+      while (results.size() != concurrency) {
+        results.wait(300);
+      }
+    }
+
+    int i = 0;
+    for (String result : results) {
+      assertEquals(Integer.toString(i++), result);
+    }
+    assertEquals(concurrency, i);
+
+    intpA.close();
+  }
+
+
+  @Test
+  public void testRunParallel() throws InterruptedException {
+    Properties p = new Properties();
+    p.put("parallel", "true");
+
+    final RemoteInterpreter intpA = new RemoteInterpreter(
+        p,
+        MockInterpreterA.class.getName(),
+        new File("../bin/interpreter.sh").getAbsolutePath(),
+        "fake",
+        env
+        );
+
+    intpGroup.add(intpA);
+    intpA.setInterpreterGroup(intpGroup);
+
+    intpA.open();
+
+    int concurrency = 4;
+    final int timeToSleep = 1000;
+    final List<String> results = new LinkedList<String>();
+    long start = System.currentTimeMillis();
+
+    Scheduler scheduler = intpA.getScheduler();
+    for (int i = 0; i < concurrency; i++) {
+      final String jobId = Integer.toString(i);
+      scheduler.submit(new Job(jobId, Integer.toString(i), null, 300) {
+
+        @Override
+        public int progress() {
+          return 0;
+        }
+
+        @Override
+        public Map<String, Object> info() {
+          return null;
+        }
+
+        @Override
+        protected Object jobRun() throws Throwable {
+          String stmt = Integer.toString(timeToSleep);
+          InterpreterResult ret = intpA.interpret(stmt, new InterpreterContext(
+              jobId,
+              "title",
+              "text",
+              new HashMap<String, Object>(),
+              new GUI()));
+
+          synchronized (results) {
+            results.add(ret.message());
+            results.notify();
+          }
+          return stmt;
+        }
+
+        @Override
+        protected boolean jobAbort() {
+          return false;
+        }
+
+      });
+    }
+
+    // wait until all jobs have finished
+    synchronized (results) {
+      while (results.size() != concurrency) {
+        results.wait(300);
+      }
+    }
+
+    long end = System.currentTimeMillis();
+
+    assertTrue(end - start < timeToSleep * concurrency);
+
+    intpA.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtilsTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtilsTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtilsTest.java
new file mode 100644
index 0000000..975d6ea
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterUtilsTest.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreterUtils;
+import org.junit.Test;
+
+public class RemoteInterpreterUtilsTest {
+
+  @Test
+  public void testFindRandomAvailablePortOnAllLocalInterfaces() throws IOException {
+    assertTrue(RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces() > 0);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/mock/MockInterpreterA.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/mock/MockInterpreterA.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/mock/MockInterpreterA.java
new file mode 100644
index 0000000..51f3c2c
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/mock/MockInterpreterA.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote.mock;
+
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+
+public class MockInterpreterA extends Interpreter {
+  static {
+    Interpreter.register(
+        "interpreterA",
+        "group1",
+        MockInterpreterA.class.getName(),
+        new InterpreterPropertyBuilder()
+            .add("p1", "v1", "property1").build());
+
+  }
+
+  private String lastSt;
+
+  public MockInterpreterA(Properties property) {
+    super(property);
+  }
+
+  @Override
+  public void open() {
+    //new RuntimeException().printStackTrace();
+  }
+
+  @Override
+  public void close() {
+  }
+
+  public String getLastStatement() {
+    return lastSt;
+  }
+
+  @Override
+  public InterpreterResult interpret(String st, InterpreterContext context) {
+    try {
+      Thread.sleep(Long.parseLong(st));
+      this.lastSt = st;
+    } catch (NumberFormatException | InterruptedException e) {
+      throw new InterpreterException(e);
+    }
+    return new InterpreterResult(Code.SUCCESS, st);
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {
+
+  }
+
+  @Override
+  public FormType getFormType() {
+    return FormType.NATIVE;
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    return 0;
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    return null;
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    if (getProperty("parallel") != null && getProperty("parallel").equals("true")) {
+      return SchedulerFactory.singleton().createOrGetParallelScheduler("interpreter_" + this.hashCode(), 10);
+    } else {
+      return SchedulerFactory.singleton().createOrGetFIFOScheduler("interpreter_" + this.hashCode());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/mock/MockInterpreterB.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/mock/MockInterpreterB.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/mock/MockInterpreterB.java
new file mode 100644
index 0000000..c7097f2
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/remote/mock/MockInterpreterB.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.remote.mock;
+
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.interpreter.WrappedInterpreter;
+import org.apache.zeppelin.scheduler.Scheduler;
+
+public class MockInterpreterB extends Interpreter {
+  static {
+    Interpreter.register(
+        "interpreterB",
+        "group1",
+        MockInterpreterB.class.getName(),
+        new InterpreterPropertyBuilder()
+            .add("p1", "v1", "property1").build());
+
+  }
+  public MockInterpreterB(Properties property) {
+    super(property);
+  }
+
+  @Override
+  public void open() {
+    //new RuntimeException().printStackTrace();
+  }
+
+  @Override
+  public void close() {
+  }
+
+  @Override
+  public InterpreterResult interpret(String st, InterpreterContext context) {
+    MockInterpreterA intpA = getInterpreterA();
+    String intpASt = intpA.getLastStatement();
+    long timeToSleep = 0;
+    try {
+      timeToSleep = Long.parseLong(st);
+      if (intpASt != null) {
+        timeToSleep += Long.parseLong(intpASt);
+      }
+      Thread.sleep(timeToSleep);
+    } catch (NumberFormatException | InterruptedException e) {
+      throw new InterpreterException(e);
+    }
+    return new InterpreterResult(Code.SUCCESS, Long.toString(timeToSleep));
+  }
+
+  @Override
+  public void cancel(InterpreterContext context) {
+
+  }
+
+  @Override
+  public FormType getFormType() {
+    return FormType.NATIVE;
+  }
+
+  @Override
+  public int getProgress(InterpreterContext context) {
+    return 0;
+  }
+
+  @Override
+  public List<String> completion(String buf, int cursor) {
+    return null;
+  }
+
+  public MockInterpreterA getInterpreterA() {
+    InterpreterGroup interpreterGroup = getInterpreterGroup();
+    for (Interpreter intp : interpreterGroup) {
+      if (intp.getClassName().equals(MockInterpreterA.class.getName())) {
+        Interpreter p = intp;
+        while (p instanceof WrappedInterpreter) {
+          p = ((WrappedInterpreter) p).getInnerInterpreter();
+        }
+        return (MockInterpreterA) p;
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public Scheduler getScheduler() {
+    InterpreterGroup interpreterGroup = getInterpreterGroup();
+    for (Interpreter intp : interpreterGroup) {
+      if (intp.getClassName().equals(MockInterpreterA.class.getName())) {
+        return intp.getScheduler();
+      }
+    }
+
+    return null;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/FIFOSchedulerTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/FIFOSchedulerTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/FIFOSchedulerTest.java
new file mode 100644
index 0000000..3d8495c
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/FIFOSchedulerTest.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.apache.zeppelin.scheduler.Job.Status;
+
+import junit.framework.TestCase;
+
+public class FIFOSchedulerTest extends TestCase {
+
+  private SchedulerFactory schedulerSvc;
+
+  @Override
+  public void setUp() throws Exception {
+    schedulerSvc = new SchedulerFactory();
+  }
+
+  @Override
+  public void tearDown() {
+
+  }
+
+  public void testRun() throws InterruptedException {
+    Scheduler s = schedulerSvc.createOrGetFIFOScheduler("test");
+    assertEquals(0, s.getJobsRunning().size());
+    assertEquals(0, s.getJobsWaiting().size());
+
+    Job job1 = new SleepingJob("job1", null, 500);
+    Job job2 = new SleepingJob("job2", null, 500);
+
+    s.submit(job1);
+    s.submit(job2);
+    Thread.sleep(200);
+
+    assertEquals(Status.RUNNING, job1.getStatus());
+    assertEquals(Status.PENDING, job2.getStatus());
+    assertEquals(1, s.getJobsRunning().size());
+    assertEquals(1, s.getJobsWaiting().size());
+
+
+    Thread.sleep(500);
+    assertEquals(Status.FINISHED, job1.getStatus());
+    assertEquals(Status.RUNNING, job2.getStatus());
+    assertTrue(500 < (Long) job1.getReturn());
+    assertEquals(1, s.getJobsRunning().size());
+    assertEquals(0, s.getJobsWaiting().size());
+
+  }
+
+  public void testAbort() throws InterruptedException {
+    Scheduler s = schedulerSvc.createOrGetFIFOScheduler("test");
+    assertEquals(0, s.getJobsRunning().size());
+    assertEquals(0, s.getJobsWaiting().size());
+
+    Job job1 = new SleepingJob("job1", null, 500);
+    Job job2 = new SleepingJob("job2", null, 500);
+
+    s.submit(job1);
+    s.submit(job2);
+
+    Thread.sleep(200);
+
+    job1.abort();
+    job2.abort();
+
+    Thread.sleep(200);
+
+    assertEquals(Status.ABORT, job1.getStatus());
+    assertEquals(Status.ABORT, job2.getStatus());
+
+    assertTrue(500 > (Long) job1.getReturn());
+    assertEquals(null, job2.getReturn());
+
+
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/ParallelSchedulerTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/ParallelSchedulerTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/ParallelSchedulerTest.java
new file mode 100644
index 0000000..682f283
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/ParallelSchedulerTest.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.apache.zeppelin.scheduler.Job.Status;
+
+import junit.framework.TestCase;
+public class ParallelSchedulerTest extends TestCase {
+
+  private SchedulerFactory schedulerSvc;
+
+  @Override
+  public void setUp() throws Exception {
+    schedulerSvc = new SchedulerFactory();
+  }
+
+  @Override
+  public void tearDown() {
+
+  }
+
+  public void testRun() throws InterruptedException {
+    Scheduler s = schedulerSvc.createOrGetParallelScheduler("test", 2);
+    assertEquals(0, s.getJobsRunning().size());
+    assertEquals(0, s.getJobsWaiting().size());
+
+    Job job1 = new SleepingJob("job1", null, 500);
+    Job job2 = new SleepingJob("job2", null, 500);
+    Job job3 = new SleepingJob("job3", null, 500);
+
+    s.submit(job1);
+    s.submit(job2);
+    s.submit(job3);
+    Thread.sleep(200);
+
+    assertEquals(Status.RUNNING, job1.getStatus());
+    assertEquals(Status.RUNNING, job2.getStatus());
+    assertEquals(Status.PENDING, job3.getStatus());
+    assertEquals(2, s.getJobsRunning().size());
+    assertEquals(1, s.getJobsWaiting().size());
+
+    Thread.sleep(500);
+
+    assertEquals(Status.FINISHED, job1.getStatus());
+    assertEquals(Status.FINISHED, job2.getStatus());
+    assertEquals(Status.RUNNING, job3.getStatus());
+    assertEquals(1, s.getJobsRunning().size());
+    assertEquals(0, s.getJobsWaiting().size());
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/RemoteSchedulerTest.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/RemoteSchedulerTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/RemoteSchedulerTest.java
new file mode 100644
index 0000000..2c13ab2
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/RemoteSchedulerTest.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.remote.RemoteInterpreter;
+import org.apache.zeppelin.interpreter.remote.mock.MockInterpreterA;
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class RemoteSchedulerTest {
+
+  private SchedulerFactory schedulerSvc;
+
+  @Before
+  public void setUp() throws Exception{
+    schedulerSvc = new SchedulerFactory();
+  }
+
+  @After
+  public void tearDown(){
+
+  }
+
+  @Test
+  public void test() throws Exception {
+    Properties p = new Properties();
+    InterpreterGroup intpGroup = new InterpreterGroup();
+    Map<String, String> env = new HashMap<String, String>();
+    env.put("ZEPPELIN_CLASSPATH", new File("./target/test-classes").getAbsolutePath());
+
+    final RemoteInterpreter intpA = new RemoteInterpreter(
+        p,
+        MockInterpreterA.class.getName(),
+        new File("../bin/interpreter.sh").getAbsolutePath(),
+        "fake",
+        env
+        );
+
+    intpGroup.add(intpA);
+    intpA.setInterpreterGroup(intpGroup);
+
+    intpA.open();
+
+    Scheduler scheduler = schedulerSvc.createOrGetRemoteScheduler("test",
+        intpA.getInterpreterProcess(),
+        10);
+
+    Job job = new Job("jobId", "jobName", null, 200) {
+
+      @Override
+      public int progress() {
+        return 0;
+      }
+
+      @Override
+      public Map<String, Object> info() {
+        return null;
+      }
+
+      @Override
+      protected Object jobRun() throws Throwable {
+        intpA.interpret("1000", new InterpreterContext(
+            "jobId",
+            "title",
+            "text",
+            new HashMap<String, Object>(),
+            new GUI()));
+        return "1000";
+      }
+
+      @Override
+      protected boolean jobAbort() {
+        return false;
+      }
+    };
+    scheduler.submit(job);
+
+    while (!job.isRunning()) {
+      Thread.sleep(100);
+    }
+
+    Thread.sleep(500);
+    assertEquals(0, scheduler.getJobsWaiting().size());
+    assertEquals(1, scheduler.getJobsRunning().size());
+
+    Thread.sleep(500);
+
+    assertEquals(0, scheduler.getJobsWaiting().size());
+    assertEquals(0, scheduler.getJobsRunning().size());
+
+    intpA.close();
+    schedulerSvc.removeScheduler("test");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/SleepingJob.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/SleepingJob.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/SleepingJob.java
new file mode 100644
index 0000000..15f86d7
--- /dev/null
+++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/scheduler/SleepingJob.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.scheduler;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.zeppelin.scheduler.Job;
+import org.apache.zeppelin.scheduler.JobListener;
+
+public class SleepingJob extends Job {
+
+  private int time;
+  private volatile boolean abort = false;
+  private long start;
+  private int count;
+
+
+  public SleepingJob(String jobName, JobListener listener, int time) {
+    super(jobName, listener);
+    this.time = time;
+    count = 0;
+  }
+  @Override
+  public Object jobRun() {
+    start = System.currentTimeMillis();
+    while (!abort) {
+      count++;
+      try {
+        Thread.sleep(10);
+      } catch (InterruptedException e) {
+      }
+      if (System.currentTimeMillis() - start > time) break;
+    }
+    return System.currentTimeMillis() - start;
+  }
+
+  @Override
+  public boolean jobAbort() {
+    abort = true;
+    return true;
+  }
+
+  @Override
+  public int progress() {
+    long p = (System.currentTimeMillis() - start) * 100 / time;
+    if (p < 0) p = 0;
+    if (p > 100) p = 100;
+    return (int) p;
+  }
+
+  @Override
+  public Map<String, Object> info() {
+    Map<String, Object> i = new HashMap<String, Object>();
+    i.put("LoopCount", Integer.toString(count));
+    return i;
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/test/resources/log4j.properties b/zeppelin-interpreter/src/test/resources/log4j.properties
index 361ca2d..d8a7839 100644
--- a/zeppelin-interpreter/src/test/resources/log4j.properties
+++ b/zeppelin-interpreter/src/test/resources/log4j.properties
@@ -1,3 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 # Direct log messages to stdout
 log4j.appender.stdout=org.apache.log4j.ConsoleAppender
 log4j.appender.stdout.Target=System.out

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml
index b0b3e1f..4b2b4d9 100644
--- a/zeppelin-server/pom.xml
+++ b/zeppelin-server/pom.xml
@@ -1,14 +1,31 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
 
-  <groupId>com.nflabs.zeppelin</groupId>
+  <groupId>org.apache.zeppelin</groupId>
   <artifactId>zeppelin-server</artifactId>
   <packaging>jar</packaging>
   <version>0.5.0-SNAPSHOT</version>
@@ -271,6 +288,26 @@
 
   <build>
     <plugins>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>**/.idea/</exclude>
+            <exclude>**/*.iml</exclude>
+            <exclude>.git/</exclude>
+            <exclude>.gitignore</exclude>
+            <exclude>**/.settings/*</exclude>
+            <exclude>**/.classpath</exclude>
+            <exclude>**/.project</exclude>
+            <exclude>**/target/**</exclude>
+            <exclude>**/derby.log</exclude>
+            <exclude>**/metastore_db/</exclude>
+            <exclude>**/README.md</exclude>
+            <exclude>src/test/java/com/webautomation/*</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
 
       <plugin>
         <artifactId>maven-failsafe-plugin</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/InterpreterRestApi.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/InterpreterRestApi.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/InterpreterRestApi.java
deleted file mode 100644
index 582ba32..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/InterpreterRestApi.java
+++ /dev/null
@@ -1,152 +0,0 @@
-package com.nflabs.zeppelin.rest;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.Gson;
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.Interpreter.RegisteredInterpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterException;
-import com.nflabs.zeppelin.interpreter.InterpreterFactory;
-import com.nflabs.zeppelin.interpreter.InterpreterSetting;
-import com.nflabs.zeppelin.rest.message.NewInterpreterSettingRequest;
-import com.nflabs.zeppelin.rest.message.UpdateInterpreterSettingRequest;
-import com.nflabs.zeppelin.server.JsonResponse;
-import com.wordnik.swagger.annotations.Api;
-import com.wordnik.swagger.annotations.ApiOperation;
-import com.wordnik.swagger.annotations.ApiResponse;
-import com.wordnik.swagger.annotations.ApiResponses;
-
-/**
- * Interpreter Rest API
- *
- */
-@Path("/interpreter")
-@Produces("application/json")
-@Api(value = "/interpreter", description = "Zeppelin Interpreter REST API")
-public class InterpreterRestApi {
-  Logger logger = LoggerFactory.getLogger(InterpreterRestApi.class);
-
-  private InterpreterFactory interpreterFactory;
-
-  Gson gson = new Gson();
-
-  public InterpreterRestApi() {
-
-  }
-
-  public InterpreterRestApi(InterpreterFactory interpreterFactory) {
-    this.interpreterFactory = interpreterFactory;
-  }
-
-  /**
-   * List all interpreter settings
-     * @return
-   */
-  @GET
-  @Path("setting")
-  @ApiOperation(httpMethod = "GET", value = "List all interpreter setting")
-  @ApiResponses(value = {@ApiResponse(code = 500, message = "When something goes wrong")})
-  public Response listSettings() {
-    List<InterpreterSetting> interpreterSettings = null;
-    interpreterSettings = interpreterFactory.get();
-    return new JsonResponse(Status.OK, "", interpreterSettings).build();
-  }
-
-  /**
-   * Add new interpreter setting
-   * @param message
-   * @return
-   * @throws IOException
-   * @throws InterpreterException
-   */
-  @POST
-  @Path("setting")
-  @ApiOperation(httpMethod = "GET", value = "Create new interpreter setting")
-  @ApiResponses(value = {@ApiResponse(code = 201, message = "On success")})
-  public Response newSettings(String message) throws InterpreterException, IOException {
-    NewInterpreterSettingRequest request = gson.fromJson(message,
-        NewInterpreterSettingRequest.class);
-    Properties p = new Properties();
-    p.putAll(request.getProperties());
-    interpreterFactory.add(request.getName(), request.getGroup(), request.getOption(), p);
-    return new JsonResponse(Status.CREATED, "").build();
-  }
-
-  @PUT
-  @Path("setting/{settingId}")
-  public Response updateSetting(String message, @PathParam("settingId") String settingId) {
-    logger.info("Update interpreterSetting {}", settingId);
-
-    try {
-      UpdateInterpreterSettingRequest p = gson.fromJson(message,
-          UpdateInterpreterSettingRequest.class);
-      interpreterFactory.setPropertyAndRestart(settingId, p.getOption(), p.getProperties());
-    } catch (InterpreterException e) {
-      return new JsonResponse(Status.NOT_FOUND, e.getMessage(), e).build();
-    } catch (IOException e) {
-      return new JsonResponse(Status.INTERNAL_SERVER_ERROR, e.getMessage(), e).build();
-    }
-    InterpreterSetting setting = interpreterFactory.get(settingId);
-    if (setting == null) {
-      return new JsonResponse(Status.NOT_FOUND, "", settingId).build();
-    }
-    return new JsonResponse(Status.OK, "", setting).build();
-  }
-
-  @DELETE
-  @Path("setting/{settingId}")
-  @ApiOperation(httpMethod = "GET", value = "Remove interpreter setting")
-  @ApiResponses(value = {@ApiResponse(code = 500, message = "When something goes wrong")})
-  public Response removeSetting(@PathParam("settingId") String settingId) throws IOException {
-    logger.info("Remove interpreterSetting {}", settingId);
-    interpreterFactory.remove(settingId);
-    return new JsonResponse(Status.OK).build();
-  }
-
-  @PUT
-  @Path("setting/restart/{settingId}")
-  @ApiOperation(httpMethod = "GET", value = "restart interpreter setting")
-  @ApiResponses(value = {
-      @ApiResponse(code = 404, message = "Not found")})
-  public Response restartSetting(@PathParam("settingId") String settingId) {
-    logger.info("Restart interpreterSetting {}", settingId);
-    try {
-      interpreterFactory.restart(settingId);
-    } catch (InterpreterException e) {
-      return new JsonResponse(Status.NOT_FOUND, e.getMessage(), e).build();
-    }
-    InterpreterSetting setting = interpreterFactory.get(settingId);
-    if (setting == null) {
-      return new JsonResponse(Status.NOT_FOUND, "", settingId).build();
-    }
-    return new JsonResponse(Status.OK, "", setting).build();
-  }
-
-  /**
-   * List all available interpreters by group
-   */
-  @GET
-  @ApiOperation(httpMethod = "GET", value = "List all available interpreters")
-  @ApiResponses(value = {
-      @ApiResponse(code = 500, message = "When something goes wrong")})
-  public Response listInterpreter(String message) {
-    Map<String, RegisteredInterpreter> m = Interpreter.registeredInterpreters;
-    return new JsonResponse(Status.OK, "", m).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/NotebookResponse.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/NotebookResponse.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/NotebookResponse.java
deleted file mode 100644
index aa92577..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/NotebookResponse.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.nflabs.zeppelin.rest;
-
-import javax.xml.bind.annotation.XmlRootElement;
-
-/**
- * Response wrapper.
- * 
- * @author anthonycorbacho
- *
- */
-@XmlRootElement
-public class NotebookResponse {
-  private String msg;
-
-  public NotebookResponse() {}
-
-  public NotebookResponse(String msg) {
-    this.msg = msg;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/NotebookRestApi.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/NotebookRestApi.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/NotebookRestApi.java
deleted file mode 100644
index 4f9ae4f..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/NotebookRestApi.java
+++ /dev/null
@@ -1,98 +0,0 @@
-package com.nflabs.zeppelin.rest;
-
-import java.io.IOException;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.Interpreter.RegisteredInterpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterSetting;
-import com.nflabs.zeppelin.notebook.Notebook;
-import com.nflabs.zeppelin.rest.message.InterpreterSettingListForNoteBind;
-import com.nflabs.zeppelin.server.JsonResponse;
-
-/**
- * Rest api endpoint for the noteBook.
- */
-@Path("/notebook")
-@Produces("application/json")
-public class NotebookRestApi {
-  Logger logger = LoggerFactory.getLogger(NotebookRestApi.class);
-  Gson gson = new Gson();
-  private Notebook notebook;
-
-  public NotebookRestApi() {}
-
-  public NotebookRestApi(Notebook notebook) {
-    this.notebook = notebook;
-  }
-
-  /**
-   * bind a setting to note
-   * @throws IOException 
-   */
-  @PUT
-  @Path("interpreter/bind/{noteId}")
-  public Response bind(@PathParam("noteId") String noteId, String req) throws IOException {
-    List<String> settingIdList = gson.fromJson(req, new TypeToken<List<String>>(){}.getType());
-    notebook.bindInterpretersToNote(noteId, settingIdList);
-    return new JsonResponse(Status.OK).build();
-  }
-
-  /**
-   * list binded setting
-   */
-  @GET
-  @Path("interpreter/bind/{noteId}")
-  public Response bind(@PathParam("noteId") String noteId) {
-    List<InterpreterSettingListForNoteBind> settingList
-      = new LinkedList<InterpreterSettingListForNoteBind>();
-
-    List<InterpreterSetting> selectedSettings = notebook.getBindedInterpreterSettings(noteId);
-    for (InterpreterSetting setting : selectedSettings) {
-      settingList.add(new InterpreterSettingListForNoteBind(
-          setting.id(),
-          setting.getName(),
-          setting.getGroup(),
-          setting.getInterpreterGroup(),
-          true)
-      );
-    }
-
-    List<InterpreterSetting> availableSettings = notebook.getInterpreterFactory().get();
-    for (InterpreterSetting setting : availableSettings) {
-      boolean selected = false;
-      for (InterpreterSetting selectedSetting : selectedSettings) {
-        if (selectedSetting.id().equals(setting.id())) {
-          selected = true;
-          break;
-        }
-      }
-
-      if (!selected) {
-        settingList.add(new InterpreterSettingListForNoteBind(
-            setting.id(),
-            setting.getName(),
-            setting.getGroup(),
-            setting.getInterpreterGroup(),
-            false)
-        );
-      }
-    }
-    return new JsonResponse(Status.OK, "", settingList).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/ZeppelinRestApi.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/ZeppelinRestApi.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/ZeppelinRestApi.java
deleted file mode 100644
index 11b27a6..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/ZeppelinRestApi.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package com.nflabs.zeppelin.rest;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Response;
-
-import com.wordnik.swagger.annotations.Api;
-
-/**
- * Zeppelin root rest api endpoint.
- *
- * @author anthonycorbacho
- * @since 0.3.4
- */
-@Path("/")
-@Api(value = "/", description = "Zeppelin REST API root")
-public class ZeppelinRestApi {
-
-  /**
-   * Required by Swagger.
-   */
-  public ZeppelinRestApi() {
-    super();
-  }
-
-  /**
-   * Get the root endpoint Return always 200.
-   *
-   * @return 200 response
-   */
-  @GET
-  public Response getRoot() {
-    return Response.ok().build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/InterpreterSettingListForNoteBind.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/InterpreterSettingListForNoteBind.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/InterpreterSettingListForNoteBind.java
deleted file mode 100644
index e8f4056..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/InterpreterSettingListForNoteBind.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package com.nflabs.zeppelin.rest.message;
-
-import java.util.List;
-
-import com.nflabs.zeppelin.interpreter.Interpreter;
-
-/**
- * InterpreterSetting information for binding
- */
-public class InterpreterSettingListForNoteBind {
-  String id;
-  String name;
-  String group;
-  private boolean selected;
-  private List<Interpreter> interpreters;
-  
-  public InterpreterSettingListForNoteBind(String id, String name,
-      String group, List<Interpreter> interpreters, boolean selected) {
-    super();
-    this.id = id;
-    this.name = name;
-    this.group = group;
-    this.interpreters = interpreters;
-    this.selected = selected;
-  }
-
-  public String getId() {
-    return id;
-  }
-
-  public void setId(String id) {
-    this.id = id;
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public String getGroup() {
-    return group;
-  }
-
-  public void setGroup(String group) {
-    this.group = group;
-  }
-
-  public List<Interpreter> getInterpreterNames() {
-    return interpreters;
-  }
-
-  public void setInterpreterNames(List<Interpreter> interpreters) {
-    this.interpreters = interpreters;
-  }
-
-  public boolean isSelected() {
-    return selected;
-  }
-
-  public void setSelected(boolean selected) {
-    this.selected = selected;
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/NewInterpreterSettingRequest.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/NewInterpreterSettingRequest.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/NewInterpreterSettingRequest.java
deleted file mode 100644
index 4817507..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/NewInterpreterSettingRequest.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.nflabs.zeppelin.rest.message;
-
-import java.util.Map;
-
-import com.nflabs.zeppelin.interpreter.InterpreterOption;
-
-/**
- *  NewInterpreterSetting rest api request message
- *
- */
-public class NewInterpreterSettingRequest {
-  String name;
-  String group;
-  InterpreterOption option;
-  Map<String, String> properties;
-
-  public NewInterpreterSettingRequest() {
-
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public String getGroup() {
-    return group;
-  }
-
-  public Map<String, String> getProperties() {
-    return properties;
-  }
-
-  public InterpreterOption getOption() {
-    return option;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/UpdateInterpreterSettingRequest.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/UpdateInterpreterSettingRequest.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/UpdateInterpreterSettingRequest.java
deleted file mode 100644
index 5f18a46..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/rest/message/UpdateInterpreterSettingRequest.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.nflabs.zeppelin.rest.message;
-
-import java.util.Properties;
-
-import com.nflabs.zeppelin.interpreter.InterpreterOption;
-
-/**
- *
- */
-public class UpdateInterpreterSettingRequest {
-  InterpreterOption option;
-  Properties properties;
-
-  public UpdateInterpreterSettingRequest(InterpreterOption option,
-      Properties properties) {
-    super();
-    this.option = option;
-    this.properties = properties;
-  }
-  public InterpreterOption getOption() {
-    return option;
-  }
-  public Properties getProperties() {
-    return properties;
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/AppScriptServlet.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/AppScriptServlet.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/AppScriptServlet.java
deleted file mode 100644
index d2b3cf5..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/AppScriptServlet.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package com.nflabs.zeppelin.server;
-
-import org.eclipse.jetty.servlet.DefaultServlet;
-import org.eclipse.jetty.util.resource.Resource;
-
-import java.io.InputStream;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
- 
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-/**
- * Simple servlet to dynamically set the Websocket port
- * in the JavaScript sent to the client
- */
-public class AppScriptServlet extends DefaultServlet {
-
-  // Set of script paths that may contain the getPort()
-  // function originally defined in app.js
-  private static Set<String> scriptPaths = new HashSet<String>(
-    Arrays.asList(
-      "/scripts/scripts.js",
-      "/scripts/app.js"
-    )
-  );
-
-  private int websocketPort;
-
-  public AppScriptServlet(int websocketPort) {
-    this.websocketPort = websocketPort;
-  }
-
-  @Override
-  protected void doGet(HttpServletRequest request, HttpServletResponse response)
-      throws ServletException,
-          IOException {
-    
-    // Delegate all requests that are not for the app script
-    // to the parent class
-    String uri = request.getRequestURI();
-    if (!scriptPaths.contains(uri)) {
-      super.doGet(request, response);
-      return;
-    }
-
-    // Read the script file chunk by chunk
-    Resource scriptFile = getResource(uri);
-    InputStream is = scriptFile.getInputStream();
-    StringBuffer script = new StringBuffer();
-    byte[] buffer = new byte[1024];
-    while (is.available() > 0) {
-      int numRead = is.read(buffer);
-      if (numRead <= 0) {
-        break;
-      }
-      script.append(new String(buffer, 0, numRead, "UTF-8"));
-    }
-
-    // Replace the string "function getPort(){...}" to return
-    // the proper value
-    int startIndex = script.indexOf("function getPort()");
-    int endIndex = script.indexOf("}", startIndex);
-
-    if (startIndex >= 0 && endIndex >= 0) {
-      String replaceString = "function getPort(){return " + websocketPort + "}";
-      script.replace(startIndex, endIndex + 1, replaceString);
-    }
-
-    response.getWriter().println(script.toString());
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/CorsFilter.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/CorsFilter.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/CorsFilter.java
deleted file mode 100644
index 9783ca3..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/CorsFilter.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package com.nflabs.zeppelin.server;
-
-import java.io.IOException;
-import java.text.DateFormat;
-import java.util.Date;
-import java.util.Locale;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-/**
- * Cors filter
- *
- */
-public class CorsFilter implements Filter {
-
-  @Override
-  public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
-      throws IOException, ServletException {
-    if (((HttpServletRequest) request).getMethod().equals("OPTIONS")) {
-      HttpServletResponse resp = ((HttpServletResponse) response);
-      addCorsHeaders(resp);
-      return;
-    }
-
-    if (response instanceof HttpServletResponse) {
-      HttpServletResponse alteredResponse = ((HttpServletResponse) response);      
-      addCorsHeaders(alteredResponse);
-    }
-    filterChain.doFilter(request, response);
-  }
-
-  private void addCorsHeaders(HttpServletResponse response) {
-    response.addHeader("Access-Control-Allow-Origin", "*");
-    response.addHeader("Access-Control-Allow-Credentials", "true");
-    response.addHeader("Access-Control-Allow-Headers", "authorization,Content-Type");
-    response.addHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, HEAD, DELETE");
-    DateFormat fullDateFormatEN =
-        DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL, new Locale("EN", "en"));
-    response.addHeader("Date", fullDateFormatEN.format(new Date()));
-  }
-
-  @Override
-  public void destroy() {}
-
-  @Override
-  public void init(FilterConfig filterConfig) throws ServletException {}
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/JsonResponse.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/JsonResponse.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/JsonResponse.java
deleted file mode 100644
index c5e81bc..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/JsonResponse.java
+++ /dev/null
@@ -1,126 +0,0 @@
-package com.nflabs.zeppelin.server;
-
-import java.util.ArrayList;
-
-import javax.ws.rs.core.NewCookie;
-import javax.ws.rs.core.Response.ResponseBuilder;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.nflabs.zeppelin.interpreter.Interpreter;
-import com.nflabs.zeppelin.interpreter.InterpreterSerializer;
-
-/**
- * Json response builder.
- * 
- * @author Leemoonsoo
- *
- * @param <T>
- */
-public class JsonResponse<T> {
-  private javax.ws.rs.core.Response.Status status;
-  private String message;
-  private T body;
-  transient ArrayList<NewCookie> cookies;
-  transient boolean pretty = false;
-
-  public JsonResponse(javax.ws.rs.core.Response.Status status) {
-    this.status = status;
-    this.message = null;
-    this.body = null;
-
-  }
-
-  public JsonResponse(javax.ws.rs.core.Response.Status status, String message) {
-    this.status = status;
-    this.message = message;
-    this.body = null;
-  }
-
-  public JsonResponse(javax.ws.rs.core.Response.Status status, T body) {
-    this.status = status;
-    this.message = null;
-    this.body = body;
-  }
-
-  public JsonResponse(javax.ws.rs.core.Response.Status status, String message, T body) {
-    this.status = status;
-    this.message = message;
-    this.body = body;
-  }
-
-  public JsonResponse<T> setPretty(boolean pretty) {
-    this.pretty = pretty;
-    return this;
-  }
-
-  /**
-   * Add cookie for building.
-   * 
-   * @param newCookie
-   * @return
-   */
-  public JsonResponse<T> addCookie(NewCookie newCookie) {
-    if (cookies == null) {
-      cookies = new ArrayList<NewCookie>();
-    }
-    cookies.add(newCookie);
-
-    return this;
-  }
-
-  /**
-   * Add cookie for building.
-   * 
-   * @param name
-   * @param value
-   * @return
-   */
-  public JsonResponse<?> addCookie(String name, String value) {
-    return addCookie(new NewCookie(name, value));
-  }
-
-  public String toString() {
-    GsonBuilder gsonBuilder = new GsonBuilder()
-      .registerTypeAdapter(Interpreter.class, new InterpreterSerializer());    
-    if (pretty) {
-      gsonBuilder.setPrettyPrinting(); 
-    } 
-    Gson gson = gsonBuilder.create();
-    return gson.toJson(this);
-  }
-
-  public javax.ws.rs.core.Response.Status getCode() {
-    return status;
-  }
-
-  public void setCode(javax.ws.rs.core.Response.Status status) {
-    this.status = status;
-  }
-
-  public String getMessage() {
-    return message;
-  }
-
-  public void setMessage(String message) {
-    this.message = message;
-  }
-
-  public T getBody() {
-    return body;
-  }
-
-  public void setBody(T body) {
-    this.body = body;
-  }
-
-  public javax.ws.rs.core.Response build() {
-    ResponseBuilder r = javax.ws.rs.core.Response.status(status).entity(this.toString());
-    if (cookies != null) {
-      for (NewCookie nc : cookies) {
-        r.cookie(nc);
-      }
-    }
-    return r.build();
-  }
-}
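
A minimal usage sketch of the builder above (illustrative, not part of this commit): constructing a JAX-RS Response as a hypothetical REST handler might. The method name and cookie values are made up.

package com.nflabs.zeppelin.server;  // same package as the class above

import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;

public class JsonResponseUsageSketch {
  // Builds a pretty-printed 200 OK response carrying an arbitrary body,
  // plus a hypothetical cookie, using the fluent API shown in the diff.
  public static Response ok(Object body) {
    return new JsonResponse<Object>(Status.OK, "OK", body)
        .setPretty(true)
        .addCookie("sessionId", "hypothetical-value")
        .build();
  }
}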

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/ZeppelinServer.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/ZeppelinServer.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/ZeppelinServer.java
deleted file mode 100644
index 9bd5d69..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/server/ZeppelinServer.java
+++ /dev/null
@@ -1,311 +0,0 @@
-package com.nflabs.zeppelin.server;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.Set;
-
-import javax.net.ssl.SSLContext;
-import javax.servlet.DispatcherType;
-import javax.ws.rs.core.Application;
-
-import org.apache.cxf.jaxrs.servlet.CXFNonSpringJaxrsServlet;
-import org.eclipse.jetty.server.Handler;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.bio.SocketConnector;
-import org.eclipse.jetty.server.handler.ContextHandlerCollection;
-import org.eclipse.jetty.server.session.SessionHandler;
-import org.eclipse.jetty.server.ssl.SslSocketConnector;
-import org.eclipse.jetty.servlet.DefaultServlet;
-import org.eclipse.jetty.servlet.FilterHolder;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.eclipse.jetty.util.ssl.SslContextFactory;
-import org.eclipse.jetty.webapp.WebAppContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration;
-import com.nflabs.zeppelin.conf.ZeppelinConfiguration.ConfVars;
-import com.nflabs.zeppelin.interpreter.InterpreterFactory;
-import com.nflabs.zeppelin.notebook.Notebook;
-import com.nflabs.zeppelin.rest.InterpreterRestApi;
-import com.nflabs.zeppelin.rest.NotebookRestApi;
-import com.nflabs.zeppelin.rest.ZeppelinRestApi;
-import com.nflabs.zeppelin.scheduler.SchedulerFactory;
-import com.nflabs.zeppelin.socket.NotebookServer;
-import com.nflabs.zeppelin.socket.SslWebSocketServerFactory;
-import com.wordnik.swagger.jersey.config.JerseyJaxrsConfig;
-
-/**
- * Main class of Zeppelin.
- *
- * @author Leemoonsoo
- *
- */
-
-public class ZeppelinServer extends Application {
-  private static final Logger LOG = LoggerFactory.getLogger(ZeppelinServer.class);
-
-  private SchedulerFactory schedulerFactory;
-  public static Notebook notebook;
-
-  static NotebookServer notebookServer;
-
-  private InterpreterFactory replFactory;
-
-  public static void main(String[] args) throws Exception {
-    ZeppelinConfiguration conf = ZeppelinConfiguration.create();
-    conf.setProperty("args", args);
-
-    final Server jettyServer = setupJettyServer(conf);
-    notebookServer = setupNotebookServer(conf);
-
-    // REST api
-    final ServletContextHandler restApi = setupRestApiContextHandler();
-    /** NOTE: Swagger-core is included via the web.xml in zeppelin-web,
-     * but the rest of Swagger is configured here
-     */
-    final ServletContextHandler swagger = setupSwaggerContextHandler(conf);
-
-    // Web UI
-    final WebAppContext webApp = setupWebAppContext(conf);
-    //Below is commented since zeppelin-docs module is removed.
-    //final WebAppContext webAppSwagg = setupWebAppSwagger(conf);
-
-    // add all handlers
-    ContextHandlerCollection contexts = new ContextHandlerCollection();
-    //contexts.setHandlers(new Handler[]{swagger, restApi, webApp, webAppSwagg});
-    contexts.setHandlers(new Handler[]{swagger, restApi, webApp});
-    jettyServer.setHandler(contexts);
-
-    notebookServer.start();
-    LOG.info("Start zeppelin server");
-    jettyServer.start();
-    LOG.info("Started");
-
-    Runtime.getRuntime().addShutdownHook(new Thread(){
-      @Override public void run() {
-        LOG.info("Shutting down Zeppelin Server ... ");
-        try {
-          notebook.getInterpreterFactory().close();
-
-          jettyServer.stop();
-          notebookServer.stop();
-        } catch (Exception e) {
-          LOG.error("Error while stopping servlet container", e);
-        }
-        LOG.info("Bye");
-      }
-    });
-
-
-    // When Zeppelin is started inside an IDE (especially Eclipse),
-    // press any key in the console window to shut down gracefully
-    if (System.getenv("ZEPPELIN_IDENT_STRING") == null) {
-      try {
-        System.in.read();
-      } catch (IOException e) {
-      }
-      System.exit(0);
-    }
-
-    jettyServer.join();
-  }
-
-  private static Server setupJettyServer(ZeppelinConfiguration conf)
-      throws Exception {
-
-    SocketConnector connector;
-    if (conf.useSsl()) {
-      connector = new SslSocketConnector(getSslContextFactory(conf));
-    }
-    else {
-      connector = new SocketConnector();
-    }
-
-    // Set some timeout options to make debugging easier.
-    int timeout = 1000 * 30;
-    connector.setMaxIdleTime(timeout);
-    connector.setSoLingerTime(-1);
-    connector.setPort(conf.getServerPort());
-
-    final Server server = new Server();
-    server.addConnector(connector);
-
-    return server;
-  }
-
-  private static NotebookServer setupNotebookServer(ZeppelinConfiguration conf)
-      throws Exception {
-
-    NotebookServer server = new NotebookServer(conf.getWebSocketPort());
-
-    // Default WebSocketServer uses unencrypted connector, so only need to
-    // change the connector if SSL should be used.
-    if (conf.useSsl()) {
-      SslWebSocketServerFactory wsf = new SslWebSocketServerFactory(getSslContext(conf));
-      wsf.setNeedClientAuth(conf.useClientAuth());
-      server.setWebSocketFactory(wsf);
-    }
-
-    return server;
-  }
-
-  private static SslContextFactory getSslContextFactory(ZeppelinConfiguration conf)
-      throws Exception {
-
-    // Note that the API for the SslContextFactory is different for
-    // Jetty version 9
-    SslContextFactory sslContextFactory = new SslContextFactory();
-
-    // Set keystore
-    sslContextFactory.setKeyStore(conf.getKeyStorePath());
-    sslContextFactory.setKeyStoreType(conf.getKeyStoreType());
-    sslContextFactory.setKeyStorePassword(conf.getKeyStorePassword());
-    sslContextFactory.setKeyManagerPassword(conf.getKeyManagerPassword());
-
-    // Set truststore
-    sslContextFactory.setTrustStore(conf.getTrustStorePath());
-    sslContextFactory.setTrustStoreType(conf.getTrustStoreType());
-    sslContextFactory.setTrustStorePassword(conf.getTrustStorePassword());
-
-    sslContextFactory.setNeedClientAuth(conf.useClientAuth());
-
-    return sslContextFactory;
-  }
-
-  private static SSLContext getSslContext(ZeppelinConfiguration conf)
-      throws Exception {
-
-    SslContextFactory scf = getSslContextFactory(conf);
-    if (!scf.isStarted()) {
-      scf.start();
-    }
-    return scf.getSslContext();
-  }
-
-  private static ServletContextHandler setupRestApiContextHandler() {
-    final ServletHolder cxfServletHolder = new ServletHolder(new CXFNonSpringJaxrsServlet());
-    cxfServletHolder.setInitParameter("javax.ws.rs.Application", ZeppelinServer.class.getName());
-    cxfServletHolder.setName("rest");
-    cxfServletHolder.setForcedPath("rest");
-
-    final ServletContextHandler cxfContext = new ServletContextHandler();
-    cxfContext.setSessionHandler(new SessionHandler());
-    cxfContext.setContextPath("/api");
-    cxfContext.addServlet(cxfServletHolder, "/*");
-
-    cxfContext.addFilter(new FilterHolder(CorsFilter.class), "/*",
-        EnumSet.allOf(DispatcherType.class));
-    return cxfContext;
-  }
-
-  /**
-   * Swagger core handler - needed for the RESTful API documentation.
-   *
-   * @return ServletContextHandler of Swagger
-   */
-  private static ServletContextHandler setupSwaggerContextHandler(
-    ZeppelinConfiguration conf) {
-
-    // Configure Swagger-core
-    final ServletHolder swaggerServlet =
-        new ServletHolder(new JerseyJaxrsConfig());
-    swaggerServlet.setName("JerseyJaxrsConfig");
-    swaggerServlet.setInitParameter("api.version", "1.0.0");
-    swaggerServlet.setInitParameter(
-        "swagger.api.basepath",
-        "http://localhost:" + conf.getServerPort() + "/api");
-    swaggerServlet.setInitOrder(2);
-
-    // Setup the handler
-    final ServletContextHandler handler = new ServletContextHandler();
-    handler.setSessionHandler(new SessionHandler());
-    // Bind Swagger-core to the url HOST/api-docs
-    handler.addServlet(swaggerServlet, "/api-docs/*");
-
-    // And we are done
-    return handler;
-  }
-
-  private static WebAppContext setupWebAppContext(
-      ZeppelinConfiguration conf) {
-
-    WebAppContext webApp = new WebAppContext();
-    File warPath = new File(conf.getString(ConfVars.ZEPPELIN_WAR));
-    if (warPath.isDirectory()) {
-      // Development mode, read from FS
-      // webApp.setDescriptor(warPath+"/WEB-INF/web.xml");
-      webApp.setResourceBase(warPath.getPath());
-      webApp.setContextPath("/");
-      webApp.setParentLoaderPriority(true);
-    } else {
-      // use packaged WAR
-      webApp.setWar(warPath.getAbsolutePath());
-    }
-    // Explicit bind to root
-    webApp.addServlet(
-      new ServletHolder(new AppScriptServlet(conf.getWebSocketPort())),
-      "/*"
-    );
-    return webApp;
-  }
-
-  /**
-   * Handles the WebApplication for Swagger-ui.
-   *
-   * @return WebAppContext with swagger ui context
-   */
-  /*private static WebAppContext setupWebAppSwagger(
-      ZeppelinConfiguration conf) {
-
-    WebAppContext webApp = new WebAppContext();
-    File warPath = new File(conf.getString(ConfVars.ZEPPELIN_API_WAR));
-
-    if (warPath.isDirectory()) {
-      webApp.setResourceBase(warPath.getPath());
-    } else {
-      webApp.setWar(warPath.getAbsolutePath());
-    }
-    webApp.setContextPath("/docs");
-    webApp.setParentLoaderPriority(true);
-    // Bind swagger-ui to the path HOST/docs
-    webApp.addServlet(new ServletHolder(new DefaultServlet()), "/docs/*");
-    return webApp;
-  }*/
-
-  public ZeppelinServer() throws Exception {
-    ZeppelinConfiguration conf = ZeppelinConfiguration.create();
-
-    this.schedulerFactory = new SchedulerFactory();
-
-    this.replFactory = new InterpreterFactory(conf);
-    notebook = new Notebook(conf, schedulerFactory, replFactory, notebookServer);
-  }
-
-  @Override
-  public Set<Class<?>> getClasses() {
-    Set<Class<?>> classes = new HashSet<Class<?>>();
-    return classes;
-  }
-
-  @Override
-  public java.util.Set<java.lang.Object> getSingletons() {
-    Set<Object> singletons = new HashSet<Object>();
-
-    /** Rest-api root endpoint */
-    ZeppelinRestApi root = new ZeppelinRestApi();
-    singletons.add(root);
-
-    NotebookRestApi notebookApi = new NotebookRestApi(notebook);
-    singletons.add(notebookApi);
-
-    InterpreterRestApi interpreterApi = new InterpreterRestApi(replFactory);
-    singletons.add(interpreterApi);
-
-    return singletons;
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/Message.java
----------------------------------------------------------------------
diff --git a/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/Message.java b/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/Message.java
deleted file mode 100644
index 6ed7db9..0000000
--- a/zeppelin-server/src/main/java/com/nflabs/zeppelin/socket/Message.java
+++ /dev/null
@@ -1,93 +0,0 @@
-package com.nflabs.zeppelin.socket;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Zeppelin websocket message template class.
- * 
- * @author Leemoonsoo
- *
- */
-public class Message {
-  /**
-   * Representation of event type.
-   * 
-   * @author Leemoonsoo
-   *
-   */
-  public static enum OP {
-    GET_NOTE, // [c-s] client load note
-              // @param id note id
-
-    NOTE, // [s-c] note info
-          // @param note serialized Note object
-
-    PARAGRAPH, // [s-c] paragraph info
-               // @param paragraph serialized paragraph object
-
-    PROGRESS, // [s-c] progress update
-              // @param id paragraph id
-              // @param progress percentage progress
-
-    NEW_NOTE, // [c-s] create new notebook
-    DEL_NOTE, // [c-s] delete notebook
-              // @param id note id
-    NOTE_UPDATE,
-
-    RUN_PARAGRAPH, // [c-s] run paragraph
-                   // @param id paragraph id
-                  // @param paragraph paragraph content, i.e. script
-                  // @param config paragraph config
-                  // @param params paragraph params
-
-    COMMIT_PARAGRAPH, // [c-s] commit paragraph
-                      // @param id paragraph id
-                      // @param title paragraph title
-                      // @param paragraph paragraph content, i.e. script
-                      // @param config paragraph config
-                      // @param params paragraph params
-
-    CANCEL_PARAGRAPH, // [c-s] cancel paragraph run
-                      // @param id paragraph id
-
-    MOVE_PARAGRAPH, // [c-s] move paragraph order
-                    // @param id paragraph id
-                    // @param index index the paragraph want to go
-
-    INSERT_PARAGRAPH, // [c-s] create new paragraph below current paragraph
-                      // @param target index
-
-    COMPLETION, // [c-s] ask completion candidates
-                // @param id
-                // @param buf current code
-                // @param cursor cursor position in code
-
-    COMPLETION_LIST, // [s-c] send back completion candidates list
-                     // @param id
-                     // @param completions list of string
-
-    LIST_NOTES, // [c-s] ask list of note
-
-    NOTES_INFO, // [s-c] list of note infos
-                // @param notes serialized List<NoteInfo> object
-
-    PARAGRAPH_REMOVE,
-  }
-
-  public OP op;
-  public Map<String, Object> data = new HashMap<String, Object>();
-
-  public Message(OP op) {
-    this.op = op;
-  }
-
-  public Message put(String k, Object v) {
-    data.put(k, v);
-    return this;
-  }
-
-  public Object get(String k) {
-    return data.get(k);
-  }
-}
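
To make the OP codes above concrete, a short illustrative sketch (not part of this commit) of assembling a client-to-server RUN_PARAGRAPH message. The parameter keys follow the comments on the enum; the Gson serialization and the helper name are assumptions.

package com.nflabs.zeppelin.socket;  // same package as the class above

import java.util.HashMap;

import com.google.gson.Gson;

public class MessageUsageSketch {
  // Builds the payload a client could send to run one paragraph.
  public static String runParagraphJson(String paragraphId, String script) {
    Message msg = new Message(Message.OP.RUN_PARAGRAPH)
        .put("id", paragraphId)               // paragraph id
        .put("paragraph", script)             // paragraph content, i.e. script
        .put("config", new HashMap<String, Object>())
        .put("params", new HashMap<String, Object>());
    return new Gson().toJson(msg);            // sent over the notebook websocket
  }
}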


http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterContext.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterContext.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterContext.java
deleted file mode 100644
index 3d14185..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterContext.java
+++ /dev/null
@@ -1,786 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.9.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- *  @generated
- */
-package com.nflabs.zeppelin.interpreter.thrift;
-
-import org.apache.thrift.scheme.IScheme;
-import org.apache.thrift.scheme.SchemeFactory;
-import org.apache.thrift.scheme.StandardScheme;
-
-import org.apache.thrift.scheme.TupleScheme;
-import org.apache.thrift.protocol.TTupleProtocol;
-import org.apache.thrift.protocol.TProtocolException;
-import org.apache.thrift.EncodingUtils;
-import org.apache.thrift.TException;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class RemoteInterpreterContext implements org.apache.thrift.TBase<RemoteInterpreterContext, RemoteInterpreterContext._Fields>, java.io.Serializable, Cloneable {
-  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteInterpreterContext");
-
-  private static final org.apache.thrift.protocol.TField PARAGRAPH_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphId", org.apache.thrift.protocol.TType.STRING, (short)1);
-  private static final org.apache.thrift.protocol.TField PARAGRAPH_TITLE_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphTitle", org.apache.thrift.protocol.TType.STRING, (short)2);
-  private static final org.apache.thrift.protocol.TField PARAGRAPH_TEXT_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphText", org.apache.thrift.protocol.TType.STRING, (short)3);
-  private static final org.apache.thrift.protocol.TField CONFIG_FIELD_DESC = new org.apache.thrift.protocol.TField("config", org.apache.thrift.protocol.TType.STRING, (short)4);
-  private static final org.apache.thrift.protocol.TField GUI_FIELD_DESC = new org.apache.thrift.protocol.TField("gui", org.apache.thrift.protocol.TType.STRING, (short)5);
-
-  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
-  static {
-    schemes.put(StandardScheme.class, new RemoteInterpreterContextStandardSchemeFactory());
-    schemes.put(TupleScheme.class, new RemoteInterpreterContextTupleSchemeFactory());
-  }
-
-  public String paragraphId; // required
-  public String paragraphTitle; // required
-  public String paragraphText; // required
-  public String config; // required
-  public String gui; // required
-
-  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-    PARAGRAPH_ID((short)1, "paragraphId"),
-    PARAGRAPH_TITLE((short)2, "paragraphTitle"),
-    PARAGRAPH_TEXT((short)3, "paragraphText"),
-    CONFIG((short)4, "config"),
-    GUI((short)5, "gui");
-
-    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
-    static {
-      for (_Fields field : EnumSet.allOf(_Fields.class)) {
-        byName.put(field.getFieldName(), field);
-      }
-    }
-
-    /**
-     * Find the _Fields constant that matches fieldId, or null if its not found.
-     */
-    public static _Fields findByThriftId(int fieldId) {
-      switch(fieldId) {
-        case 1: // PARAGRAPH_ID
-          return PARAGRAPH_ID;
-        case 2: // PARAGRAPH_TITLE
-          return PARAGRAPH_TITLE;
-        case 3: // PARAGRAPH_TEXT
-          return PARAGRAPH_TEXT;
-        case 4: // CONFIG
-          return CONFIG;
-        case 5: // GUI
-          return GUI;
-        default:
-          return null;
-      }
-    }
-
-    /**
-     * Find the _Fields constant that matches fieldId, throwing an exception
-     * if it is not found.
-     */
-    public static _Fields findByThriftIdOrThrow(int fieldId) {
-      _Fields fields = findByThriftId(fieldId);
-      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-      return fields;
-    }
-
-    /**
-     * Find the _Fields constant that matches name, or null if its not found.
-     */
-    public static _Fields findByName(String name) {
-      return byName.get(name);
-    }
-
-    private final short _thriftId;
-    private final String _fieldName;
-
-    _Fields(short thriftId, String fieldName) {
-      _thriftId = thriftId;
-      _fieldName = fieldName;
-    }
-
-    public short getThriftFieldId() {
-      return _thriftId;
-    }
-
-    public String getFieldName() {
-      return _fieldName;
-    }
-  }
-
-  // isset id assignments
-  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-  static {
-    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.PARAGRAPH_ID, new org.apache.thrift.meta_data.FieldMetaData("paragraphId", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.PARAGRAPH_TITLE, new org.apache.thrift.meta_data.FieldMetaData("paragraphTitle", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.PARAGRAPH_TEXT, new org.apache.thrift.meta_data.FieldMetaData("paragraphText", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.CONFIG, new org.apache.thrift.meta_data.FieldMetaData("config", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.GUI, new org.apache.thrift.meta_data.FieldMetaData("gui", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    metaDataMap = Collections.unmodifiableMap(tmpMap);
-    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteInterpreterContext.class, metaDataMap);
-  }
-
-  public RemoteInterpreterContext() {
-  }
-
-  public RemoteInterpreterContext(
-    String paragraphId,
-    String paragraphTitle,
-    String paragraphText,
-    String config,
-    String gui)
-  {
-    this();
-    this.paragraphId = paragraphId;
-    this.paragraphTitle = paragraphTitle;
-    this.paragraphText = paragraphText;
-    this.config = config;
-    this.gui = gui;
-  }
-
-  /**
-   * Performs a deep copy on <i>other</i>.
-   */
-  public RemoteInterpreterContext(RemoteInterpreterContext other) {
-    if (other.isSetParagraphId()) {
-      this.paragraphId = other.paragraphId;
-    }
-    if (other.isSetParagraphTitle()) {
-      this.paragraphTitle = other.paragraphTitle;
-    }
-    if (other.isSetParagraphText()) {
-      this.paragraphText = other.paragraphText;
-    }
-    if (other.isSetConfig()) {
-      this.config = other.config;
-    }
-    if (other.isSetGui()) {
-      this.gui = other.gui;
-    }
-  }
-
-  public RemoteInterpreterContext deepCopy() {
-    return new RemoteInterpreterContext(this);
-  }
-
-  @Override
-  public void clear() {
-    this.paragraphId = null;
-    this.paragraphTitle = null;
-    this.paragraphText = null;
-    this.config = null;
-    this.gui = null;
-  }
-
-  public String getParagraphId() {
-    return this.paragraphId;
-  }
-
-  public RemoteInterpreterContext setParagraphId(String paragraphId) {
-    this.paragraphId = paragraphId;
-    return this;
-  }
-
-  public void unsetParagraphId() {
-    this.paragraphId = null;
-  }
-
-  /** Returns true if field paragraphId is set (has been assigned a value) and false otherwise */
-  public boolean isSetParagraphId() {
-    return this.paragraphId != null;
-  }
-
-  public void setParagraphIdIsSet(boolean value) {
-    if (!value) {
-      this.paragraphId = null;
-    }
-  }
-
-  public String getParagraphTitle() {
-    return this.paragraphTitle;
-  }
-
-  public RemoteInterpreterContext setParagraphTitle(String paragraphTitle) {
-    this.paragraphTitle = paragraphTitle;
-    return this;
-  }
-
-  public void unsetParagraphTitle() {
-    this.paragraphTitle = null;
-  }
-
-  /** Returns true if field paragraphTitle is set (has been assigned a value) and false otherwise */
-  public boolean isSetParagraphTitle() {
-    return this.paragraphTitle != null;
-  }
-
-  public void setParagraphTitleIsSet(boolean value) {
-    if (!value) {
-      this.paragraphTitle = null;
-    }
-  }
-
-  public String getParagraphText() {
-    return this.paragraphText;
-  }
-
-  public RemoteInterpreterContext setParagraphText(String paragraphText) {
-    this.paragraphText = paragraphText;
-    return this;
-  }
-
-  public void unsetParagraphText() {
-    this.paragraphText = null;
-  }
-
-  /** Returns true if field paragraphText is set (has been assigned a value) and false otherwise */
-  public boolean isSetParagraphText() {
-    return this.paragraphText != null;
-  }
-
-  public void setParagraphTextIsSet(boolean value) {
-    if (!value) {
-      this.paragraphText = null;
-    }
-  }
-
-  public String getConfig() {
-    return this.config;
-  }
-
-  public RemoteInterpreterContext setConfig(String config) {
-    this.config = config;
-    return this;
-  }
-
-  public void unsetConfig() {
-    this.config = null;
-  }
-
-  /** Returns true if field config is set (has been assigned a value) and false otherwise */
-  public boolean isSetConfig() {
-    return this.config != null;
-  }
-
-  public void setConfigIsSet(boolean value) {
-    if (!value) {
-      this.config = null;
-    }
-  }
-
-  public String getGui() {
-    return this.gui;
-  }
-
-  public RemoteInterpreterContext setGui(String gui) {
-    this.gui = gui;
-    return this;
-  }
-
-  public void unsetGui() {
-    this.gui = null;
-  }
-
-  /** Returns true if field gui is set (has been assigned a value) and false otherwise */
-  public boolean isSetGui() {
-    return this.gui != null;
-  }
-
-  public void setGuiIsSet(boolean value) {
-    if (!value) {
-      this.gui = null;
-    }
-  }
-
-  public void setFieldValue(_Fields field, Object value) {
-    switch (field) {
-    case PARAGRAPH_ID:
-      if (value == null) {
-        unsetParagraphId();
-      } else {
-        setParagraphId((String)value);
-      }
-      break;
-
-    case PARAGRAPH_TITLE:
-      if (value == null) {
-        unsetParagraphTitle();
-      } else {
-        setParagraphTitle((String)value);
-      }
-      break;
-
-    case PARAGRAPH_TEXT:
-      if (value == null) {
-        unsetParagraphText();
-      } else {
-        setParagraphText((String)value);
-      }
-      break;
-
-    case CONFIG:
-      if (value == null) {
-        unsetConfig();
-      } else {
-        setConfig((String)value);
-      }
-      break;
-
-    case GUI:
-      if (value == null) {
-        unsetGui();
-      } else {
-        setGui((String)value);
-      }
-      break;
-
-    }
-  }
-
-  public Object getFieldValue(_Fields field) {
-    switch (field) {
-    case PARAGRAPH_ID:
-      return getParagraphId();
-
-    case PARAGRAPH_TITLE:
-      return getParagraphTitle();
-
-    case PARAGRAPH_TEXT:
-      return getParagraphText();
-
-    case CONFIG:
-      return getConfig();
-
-    case GUI:
-      return getGui();
-
-    }
-    throw new IllegalStateException();
-  }
-
-  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-  public boolean isSet(_Fields field) {
-    if (field == null) {
-      throw new IllegalArgumentException();
-    }
-
-    switch (field) {
-    case PARAGRAPH_ID:
-      return isSetParagraphId();
-    case PARAGRAPH_TITLE:
-      return isSetParagraphTitle();
-    case PARAGRAPH_TEXT:
-      return isSetParagraphText();
-    case CONFIG:
-      return isSetConfig();
-    case GUI:
-      return isSetGui();
-    }
-    throw new IllegalStateException();
-  }
-
-  @Override
-  public boolean equals(Object that) {
-    if (that == null)
-      return false;
-    if (that instanceof RemoteInterpreterContext)
-      return this.equals((RemoteInterpreterContext)that);
-    return false;
-  }
-
-  public boolean equals(RemoteInterpreterContext that) {
-    if (that == null)
-      return false;
-
-    boolean this_present_paragraphId = true && this.isSetParagraphId();
-    boolean that_present_paragraphId = true && that.isSetParagraphId();
-    if (this_present_paragraphId || that_present_paragraphId) {
-      if (!(this_present_paragraphId && that_present_paragraphId))
-        return false;
-      if (!this.paragraphId.equals(that.paragraphId))
-        return false;
-    }
-
-    boolean this_present_paragraphTitle = true && this.isSetParagraphTitle();
-    boolean that_present_paragraphTitle = true && that.isSetParagraphTitle();
-    if (this_present_paragraphTitle || that_present_paragraphTitle) {
-      if (!(this_present_paragraphTitle && that_present_paragraphTitle))
-        return false;
-      if (!this.paragraphTitle.equals(that.paragraphTitle))
-        return false;
-    }
-
-    boolean this_present_paragraphText = true && this.isSetParagraphText();
-    boolean that_present_paragraphText = true && that.isSetParagraphText();
-    if (this_present_paragraphText || that_present_paragraphText) {
-      if (!(this_present_paragraphText && that_present_paragraphText))
-        return false;
-      if (!this.paragraphText.equals(that.paragraphText))
-        return false;
-    }
-
-    boolean this_present_config = true && this.isSetConfig();
-    boolean that_present_config = true && that.isSetConfig();
-    if (this_present_config || that_present_config) {
-      if (!(this_present_config && that_present_config))
-        return false;
-      if (!this.config.equals(that.config))
-        return false;
-    }
-
-    boolean this_present_gui = true && this.isSetGui();
-    boolean that_present_gui = true && that.isSetGui();
-    if (this_present_gui || that_present_gui) {
-      if (!(this_present_gui && that_present_gui))
-        return false;
-      if (!this.gui.equals(that.gui))
-        return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    return 0;
-  }
-
-  public int compareTo(RemoteInterpreterContext other) {
-    if (!getClass().equals(other.getClass())) {
-      return getClass().getName().compareTo(other.getClass().getName());
-    }
-
-    int lastComparison = 0;
-    RemoteInterpreterContext typedOther = (RemoteInterpreterContext)other;
-
-    lastComparison = Boolean.valueOf(isSetParagraphId()).compareTo(typedOther.isSetParagraphId());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetParagraphId()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphId, typedOther.paragraphId);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetParagraphTitle()).compareTo(typedOther.isSetParagraphTitle());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetParagraphTitle()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphTitle, typedOther.paragraphTitle);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetParagraphText()).compareTo(typedOther.isSetParagraphText());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetParagraphText()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphText, typedOther.paragraphText);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetConfig()).compareTo(typedOther.isSetConfig());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetConfig()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.config, typedOther.config);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetGui()).compareTo(typedOther.isSetGui());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetGui()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.gui, typedOther.gui);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    return 0;
-  }
-
-  public _Fields fieldForId(int fieldId) {
-    return _Fields.findByThriftId(fieldId);
-  }
-
-  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
-  }
-
-  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
-  }
-
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder("RemoteInterpreterContext(");
-    boolean first = true;
-
-    sb.append("paragraphId:");
-    if (this.paragraphId == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.paragraphId);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("paragraphTitle:");
-    if (this.paragraphTitle == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.paragraphTitle);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("paragraphText:");
-    if (this.paragraphText == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.paragraphText);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("config:");
-    if (this.config == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.config);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("gui:");
-    if (this.gui == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.gui);
-    }
-    first = false;
-    sb.append(")");
-    return sb.toString();
-  }
-
-  public void validate() throws org.apache.thrift.TException {
-    // check for required fields
-    // check for sub-struct validity
-  }
-
-  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-    try {
-      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
-    try {
-      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private static class RemoteInterpreterContextStandardSchemeFactory implements SchemeFactory {
-    public RemoteInterpreterContextStandardScheme getScheme() {
-      return new RemoteInterpreterContextStandardScheme();
-    }
-  }
-
-  private static class RemoteInterpreterContextStandardScheme extends StandardScheme<RemoteInterpreterContext> {
-
-    public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteInterpreterContext struct) throws org.apache.thrift.TException {
-      org.apache.thrift.protocol.TField schemeField;
-      iprot.readStructBegin();
-      while (true)
-      {
-        schemeField = iprot.readFieldBegin();
-        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-          break;
-        }
-        switch (schemeField.id) {
-          case 1: // PARAGRAPH_ID
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.paragraphId = iprot.readString();
-              struct.setParagraphIdIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 2: // PARAGRAPH_TITLE
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.paragraphTitle = iprot.readString();
-              struct.setParagraphTitleIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 3: // PARAGRAPH_TEXT
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.paragraphText = iprot.readString();
-              struct.setParagraphTextIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 4: // CONFIG
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.config = iprot.readString();
-              struct.setConfigIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 5: // GUI
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.gui = iprot.readString();
-              struct.setGuiIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          default:
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-        }
-        iprot.readFieldEnd();
-      }
-      iprot.readStructEnd();
-
-      // check for required fields of primitive type, which can't be checked in the validate method
-      struct.validate();
-    }
-
-    public void write(org.apache.thrift.protocol.TProtocol oprot, RemoteInterpreterContext struct) throws org.apache.thrift.TException {
-      struct.validate();
-
-      oprot.writeStructBegin(STRUCT_DESC);
-      if (struct.paragraphId != null) {
-        oprot.writeFieldBegin(PARAGRAPH_ID_FIELD_DESC);
-        oprot.writeString(struct.paragraphId);
-        oprot.writeFieldEnd();
-      }
-      if (struct.paragraphTitle != null) {
-        oprot.writeFieldBegin(PARAGRAPH_TITLE_FIELD_DESC);
-        oprot.writeString(struct.paragraphTitle);
-        oprot.writeFieldEnd();
-      }
-      if (struct.paragraphText != null) {
-        oprot.writeFieldBegin(PARAGRAPH_TEXT_FIELD_DESC);
-        oprot.writeString(struct.paragraphText);
-        oprot.writeFieldEnd();
-      }
-      if (struct.config != null) {
-        oprot.writeFieldBegin(CONFIG_FIELD_DESC);
-        oprot.writeString(struct.config);
-        oprot.writeFieldEnd();
-      }
-      if (struct.gui != null) {
-        oprot.writeFieldBegin(GUI_FIELD_DESC);
-        oprot.writeString(struct.gui);
-        oprot.writeFieldEnd();
-      }
-      oprot.writeFieldStop();
-      oprot.writeStructEnd();
-    }
-
-  }
-
-  private static class RemoteInterpreterContextTupleSchemeFactory implements SchemeFactory {
-    public RemoteInterpreterContextTupleScheme getScheme() {
-      return new RemoteInterpreterContextTupleScheme();
-    }
-  }
-
-  private static class RemoteInterpreterContextTupleScheme extends TupleScheme<RemoteInterpreterContext> {
-
-    @Override
-    public void write(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterContext struct) throws org.apache.thrift.TException {
-      TTupleProtocol oprot = (TTupleProtocol) prot;
-      BitSet optionals = new BitSet();
-      if (struct.isSetParagraphId()) {
-        optionals.set(0);
-      }
-      if (struct.isSetParagraphTitle()) {
-        optionals.set(1);
-      }
-      if (struct.isSetParagraphText()) {
-        optionals.set(2);
-      }
-      if (struct.isSetConfig()) {
-        optionals.set(3);
-      }
-      if (struct.isSetGui()) {
-        optionals.set(4);
-      }
-      oprot.writeBitSet(optionals, 5);
-      if (struct.isSetParagraphId()) {
-        oprot.writeString(struct.paragraphId);
-      }
-      if (struct.isSetParagraphTitle()) {
-        oprot.writeString(struct.paragraphTitle);
-      }
-      if (struct.isSetParagraphText()) {
-        oprot.writeString(struct.paragraphText);
-      }
-      if (struct.isSetConfig()) {
-        oprot.writeString(struct.config);
-      }
-      if (struct.isSetGui()) {
-        oprot.writeString(struct.gui);
-      }
-    }
-
-    @Override
-    public void read(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterContext struct) throws org.apache.thrift.TException {
-      TTupleProtocol iprot = (TTupleProtocol) prot;
-      BitSet incoming = iprot.readBitSet(5);
-      if (incoming.get(0)) {
-        struct.paragraphId = iprot.readString();
-        struct.setParagraphIdIsSet(true);
-      }
-      if (incoming.get(1)) {
-        struct.paragraphTitle = iprot.readString();
-        struct.setParagraphTitleIsSet(true);
-      }
-      if (incoming.get(2)) {
-        struct.paragraphText = iprot.readString();
-        struct.setParagraphTextIsSet(true);
-      }
-      if (incoming.get(3)) {
-        struct.config = iprot.readString();
-        struct.setConfigIsSet(true);
-      }
-      if (incoming.get(4)) {
-        struct.gui = iprot.readString();
-        struct.setGuiIsSet(true);
-      }
-    }
-  }
-
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterResult.java
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterResult.java b/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterResult.java
deleted file mode 100644
index 2e53e30..0000000
--- a/zeppelin-interpreter/src/main/java/com/nflabs/zeppelin/interpreter/thrift/RemoteInterpreterResult.java
+++ /dev/null
@@ -1,786 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.9.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- *  @generated
- */
-package com.nflabs.zeppelin.interpreter.thrift;
-
-import org.apache.thrift.scheme.IScheme;
-import org.apache.thrift.scheme.SchemeFactory;
-import org.apache.thrift.scheme.StandardScheme;
-
-import org.apache.thrift.scheme.TupleScheme;
-import org.apache.thrift.protocol.TTupleProtocol;
-import org.apache.thrift.protocol.TProtocolException;
-import org.apache.thrift.EncodingUtils;
-import org.apache.thrift.TException;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class RemoteInterpreterResult implements org.apache.thrift.TBase<RemoteInterpreterResult, RemoteInterpreterResult._Fields>, java.io.Serializable, Cloneable {
-  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteInterpreterResult");
-
-  private static final org.apache.thrift.protocol.TField CODE_FIELD_DESC = new org.apache.thrift.protocol.TField("code", org.apache.thrift.protocol.TType.STRING, (short)1);
-  private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.STRING, (short)2);
-  private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)3);
-  private static final org.apache.thrift.protocol.TField CONFIG_FIELD_DESC = new org.apache.thrift.protocol.TField("config", org.apache.thrift.protocol.TType.STRING, (short)4);
-  private static final org.apache.thrift.protocol.TField GUI_FIELD_DESC = new org.apache.thrift.protocol.TField("gui", org.apache.thrift.protocol.TType.STRING, (short)5);
-
-  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
-  static {
-    schemes.put(StandardScheme.class, new RemoteInterpreterResultStandardSchemeFactory());
-    schemes.put(TupleScheme.class, new RemoteInterpreterResultTupleSchemeFactory());
-  }
-
-  public String code; // required
-  public String type; // required
-  public String msg; // required
-  public String config; // required
-  public String gui; // required
-
-  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-    CODE((short)1, "code"),
-    TYPE((short)2, "type"),
-    MSG((short)3, "msg"),
-    CONFIG((short)4, "config"),
-    GUI((short)5, "gui");
-
-    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
-    static {
-      for (_Fields field : EnumSet.allOf(_Fields.class)) {
-        byName.put(field.getFieldName(), field);
-      }
-    }
-
-    /**
-     * Find the _Fields constant that matches fieldId, or null if its not found.
-     */
-    public static _Fields findByThriftId(int fieldId) {
-      switch(fieldId) {
-        case 1: // CODE
-          return CODE;
-        case 2: // TYPE
-          return TYPE;
-        case 3: // MSG
-          return MSG;
-        case 4: // CONFIG
-          return CONFIG;
-        case 5: // GUI
-          return GUI;
-        default:
-          return null;
-      }
-    }
-
-    /**
-     * Find the _Fields constant that matches fieldId, throwing an exception
-     * if it is not found.
-     */
-    public static _Fields findByThriftIdOrThrow(int fieldId) {
-      _Fields fields = findByThriftId(fieldId);
-      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-      return fields;
-    }
-
-    /**
-     * Find the _Fields constant that matches name, or null if its not found.
-     */
-    public static _Fields findByName(String name) {
-      return byName.get(name);
-    }
-
-    private final short _thriftId;
-    private final String _fieldName;
-
-    _Fields(short thriftId, String fieldName) {
-      _thriftId = thriftId;
-      _fieldName = fieldName;
-    }
-
-    public short getThriftFieldId() {
-      return _thriftId;
-    }
-
-    public String getFieldName() {
-      return _fieldName;
-    }
-  }
-
-  // isset id assignments
-  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-  static {
-    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.CODE, new org.apache.thrift.meta_data.FieldMetaData("code", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.CONFIG, new org.apache.thrift.meta_data.FieldMetaData("config", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.GUI, new org.apache.thrift.meta_data.FieldMetaData("gui", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    metaDataMap = Collections.unmodifiableMap(tmpMap);
-    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteInterpreterResult.class, metaDataMap);
-  }
-
-  public RemoteInterpreterResult() {
-  }
-
-  public RemoteInterpreterResult(
-    String code,
-    String type,
-    String msg,
-    String config,
-    String gui)
-  {
-    this();
-    this.code = code;
-    this.type = type;
-    this.msg = msg;
-    this.config = config;
-    this.gui = gui;
-  }
-
-  /**
-   * Performs a deep copy on <i>other</i>.
-   */
-  public RemoteInterpreterResult(RemoteInterpreterResult other) {
-    if (other.isSetCode()) {
-      this.code = other.code;
-    }
-    if (other.isSetType()) {
-      this.type = other.type;
-    }
-    if (other.isSetMsg()) {
-      this.msg = other.msg;
-    }
-    if (other.isSetConfig()) {
-      this.config = other.config;
-    }
-    if (other.isSetGui()) {
-      this.gui = other.gui;
-    }
-  }
-
-  public RemoteInterpreterResult deepCopy() {
-    return new RemoteInterpreterResult(this);
-  }
-
-  @Override
-  public void clear() {
-    this.code = null;
-    this.type = null;
-    this.msg = null;
-    this.config = null;
-    this.gui = null;
-  }
-
-  public String getCode() {
-    return this.code;
-  }
-
-  public RemoteInterpreterResult setCode(String code) {
-    this.code = code;
-    return this;
-  }
-
-  public void unsetCode() {
-    this.code = null;
-  }
-
-  /** Returns true if field code is set (has been assigned a value) and false otherwise */
-  public boolean isSetCode() {
-    return this.code != null;
-  }
-
-  public void setCodeIsSet(boolean value) {
-    if (!value) {
-      this.code = null;
-    }
-  }
-
-  public String getType() {
-    return this.type;
-  }
-
-  public RemoteInterpreterResult setType(String type) {
-    this.type = type;
-    return this;
-  }
-
-  public void unsetType() {
-    this.type = null;
-  }
-
-  /** Returns true if field type is set (has been assigned a value) and false otherwise */
-  public boolean isSetType() {
-    return this.type != null;
-  }
-
-  public void setTypeIsSet(boolean value) {
-    if (!value) {
-      this.type = null;
-    }
-  }
-
-  public String getMsg() {
-    return this.msg;
-  }
-
-  public RemoteInterpreterResult setMsg(String msg) {
-    this.msg = msg;
-    return this;
-  }
-
-  public void unsetMsg() {
-    this.msg = null;
-  }
-
-  /** Returns true if field msg is set (has been assigned a value) and false otherwise */
-  public boolean isSetMsg() {
-    return this.msg != null;
-  }
-
-  public void setMsgIsSet(boolean value) {
-    if (!value) {
-      this.msg = null;
-    }
-  }
-
-  public String getConfig() {
-    return this.config;
-  }
-
-  public RemoteInterpreterResult setConfig(String config) {
-    this.config = config;
-    return this;
-  }
-
-  public void unsetConfig() {
-    this.config = null;
-  }
-
-  /** Returns true if field config is set (has been assigned a value) and false otherwise */
-  public boolean isSetConfig() {
-    return this.config != null;
-  }
-
-  public void setConfigIsSet(boolean value) {
-    if (!value) {
-      this.config = null;
-    }
-  }
-
-  public String getGui() {
-    return this.gui;
-  }
-
-  public RemoteInterpreterResult setGui(String gui) {
-    this.gui = gui;
-    return this;
-  }
-
-  public void unsetGui() {
-    this.gui = null;
-  }
-
-  /** Returns true if field gui is set (has been assigned a value) and false otherwise */
-  public boolean isSetGui() {
-    return this.gui != null;
-  }
-
-  public void setGuiIsSet(boolean value) {
-    if (!value) {
-      this.gui = null;
-    }
-  }
-
-  public void setFieldValue(_Fields field, Object value) {
-    switch (field) {
-    case CODE:
-      if (value == null) {
-        unsetCode();
-      } else {
-        setCode((String)value);
-      }
-      break;
-
-    case TYPE:
-      if (value == null) {
-        unsetType();
-      } else {
-        setType((String)value);
-      }
-      break;
-
-    case MSG:
-      if (value == null) {
-        unsetMsg();
-      } else {
-        setMsg((String)value);
-      }
-      break;
-
-    case CONFIG:
-      if (value == null) {
-        unsetConfig();
-      } else {
-        setConfig((String)value);
-      }
-      break;
-
-    case GUI:
-      if (value == null) {
-        unsetGui();
-      } else {
-        setGui((String)value);
-      }
-      break;
-
-    }
-  }
-
-  public Object getFieldValue(_Fields field) {
-    switch (field) {
-    case CODE:
-      return getCode();
-
-    case TYPE:
-      return getType();
-
-    case MSG:
-      return getMsg();
-
-    case CONFIG:
-      return getConfig();
-
-    case GUI:
-      return getGui();
-
-    }
-    throw new IllegalStateException();
-  }
-
-  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-  public boolean isSet(_Fields field) {
-    if (field == null) {
-      throw new IllegalArgumentException();
-    }
-
-    switch (field) {
-    case CODE:
-      return isSetCode();
-    case TYPE:
-      return isSetType();
-    case MSG:
-      return isSetMsg();
-    case CONFIG:
-      return isSetConfig();
-    case GUI:
-      return isSetGui();
-    }
-    throw new IllegalStateException();
-  }
-
-  @Override
-  public boolean equals(Object that) {
-    if (that == null)
-      return false;
-    if (that instanceof RemoteInterpreterResult)
-      return this.equals((RemoteInterpreterResult)that);
-    return false;
-  }
-
-  public boolean equals(RemoteInterpreterResult that) {
-    if (that == null)
-      return false;
-
-    boolean this_present_code = true && this.isSetCode();
-    boolean that_present_code = true && that.isSetCode();
-    if (this_present_code || that_present_code) {
-      if (!(this_present_code && that_present_code))
-        return false;
-      if (!this.code.equals(that.code))
-        return false;
-    }
-
-    boolean this_present_type = true && this.isSetType();
-    boolean that_present_type = true && that.isSetType();
-    if (this_present_type || that_present_type) {
-      if (!(this_present_type && that_present_type))
-        return false;
-      if (!this.type.equals(that.type))
-        return false;
-    }
-
-    boolean this_present_msg = true && this.isSetMsg();
-    boolean that_present_msg = true && that.isSetMsg();
-    if (this_present_msg || that_present_msg) {
-      if (!(this_present_msg && that_present_msg))
-        return false;
-      if (!this.msg.equals(that.msg))
-        return false;
-    }
-
-    boolean this_present_config = true && this.isSetConfig();
-    boolean that_present_config = true && that.isSetConfig();
-    if (this_present_config || that_present_config) {
-      if (!(this_present_config && that_present_config))
-        return false;
-      if (!this.config.equals(that.config))
-        return false;
-    }
-
-    boolean this_present_gui = true && this.isSetGui();
-    boolean that_present_gui = true && that.isSetGui();
-    if (this_present_gui || that_present_gui) {
-      if (!(this_present_gui && that_present_gui))
-        return false;
-      if (!this.gui.equals(that.gui))
-        return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    return 0;
-  }
-
-  public int compareTo(RemoteInterpreterResult other) {
-    if (!getClass().equals(other.getClass())) {
-      return getClass().getName().compareTo(other.getClass().getName());
-    }
-
-    int lastComparison = 0;
-    RemoteInterpreterResult typedOther = (RemoteInterpreterResult)other;
-
-    lastComparison = Boolean.valueOf(isSetCode()).compareTo(typedOther.isSetCode());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetCode()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.code, typedOther.code);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetType()).compareTo(typedOther.isSetType());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetType()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, typedOther.type);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetMsg()).compareTo(typedOther.isSetMsg());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetMsg()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, typedOther.msg);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetConfig()).compareTo(typedOther.isSetConfig());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetConfig()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.config, typedOther.config);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetGui()).compareTo(typedOther.isSetGui());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetGui()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.gui, typedOther.gui);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    return 0;
-  }
-
-  public _Fields fieldForId(int fieldId) {
-    return _Fields.findByThriftId(fieldId);
-  }
-
-  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
-  }
-
-  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
-  }
-
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder("RemoteInterpreterResult(");
-    boolean first = true;
-
-    sb.append("code:");
-    if (this.code == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.code);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("type:");
-    if (this.type == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.type);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("msg:");
-    if (this.msg == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.msg);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("config:");
-    if (this.config == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.config);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("gui:");
-    if (this.gui == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.gui);
-    }
-    first = false;
-    sb.append(")");
-    return sb.toString();
-  }
-
-  public void validate() throws org.apache.thrift.TException {
-    // check for required fields
-    // check for sub-struct validity
-  }
-
-  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-    try {
-      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
-    try {
-      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private static class RemoteInterpreterResultStandardSchemeFactory implements SchemeFactory {
-    public RemoteInterpreterResultStandardScheme getScheme() {
-      return new RemoteInterpreterResultStandardScheme();
-    }
-  }
-
-  private static class RemoteInterpreterResultStandardScheme extends StandardScheme<RemoteInterpreterResult> {
-
-    public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
-      org.apache.thrift.protocol.TField schemeField;
-      iprot.readStructBegin();
-      while (true)
-      {
-        schemeField = iprot.readFieldBegin();
-        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-          break;
-        }
-        switch (schemeField.id) {
-          case 1: // CODE
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.code = iprot.readString();
-              struct.setCodeIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 2: // TYPE
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.type = iprot.readString();
-              struct.setTypeIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 3: // MSG
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.msg = iprot.readString();
-              struct.setMsgIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 4: // CONFIG
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.config = iprot.readString();
-              struct.setConfigIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 5: // GUI
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.gui = iprot.readString();
-              struct.setGuiIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          default:
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-        }
-        iprot.readFieldEnd();
-      }
-      iprot.readStructEnd();
-
-      // check for required fields of primitive type, which can't be checked in the validate method
-      struct.validate();
-    }
-
-    public void write(org.apache.thrift.protocol.TProtocol oprot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
-      struct.validate();
-
-      oprot.writeStructBegin(STRUCT_DESC);
-      if (struct.code != null) {
-        oprot.writeFieldBegin(CODE_FIELD_DESC);
-        oprot.writeString(struct.code);
-        oprot.writeFieldEnd();
-      }
-      if (struct.type != null) {
-        oprot.writeFieldBegin(TYPE_FIELD_DESC);
-        oprot.writeString(struct.type);
-        oprot.writeFieldEnd();
-      }
-      if (struct.msg != null) {
-        oprot.writeFieldBegin(MSG_FIELD_DESC);
-        oprot.writeString(struct.msg);
-        oprot.writeFieldEnd();
-      }
-      if (struct.config != null) {
-        oprot.writeFieldBegin(CONFIG_FIELD_DESC);
-        oprot.writeString(struct.config);
-        oprot.writeFieldEnd();
-      }
-      if (struct.gui != null) {
-        oprot.writeFieldBegin(GUI_FIELD_DESC);
-        oprot.writeString(struct.gui);
-        oprot.writeFieldEnd();
-      }
-      oprot.writeFieldStop();
-      oprot.writeStructEnd();
-    }
-
-  }
-
-  private static class RemoteInterpreterResultTupleSchemeFactory implements SchemeFactory {
-    public RemoteInterpreterResultTupleScheme getScheme() {
-      return new RemoteInterpreterResultTupleScheme();
-    }
-  }
-
-  private static class RemoteInterpreterResultTupleScheme extends TupleScheme<RemoteInterpreterResult> {
-
-    @Override
-    public void write(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
-      TTupleProtocol oprot = (TTupleProtocol) prot;
-      BitSet optionals = new BitSet();
-      if (struct.isSetCode()) {
-        optionals.set(0);
-      }
-      if (struct.isSetType()) {
-        optionals.set(1);
-      }
-      if (struct.isSetMsg()) {
-        optionals.set(2);
-      }
-      if (struct.isSetConfig()) {
-        optionals.set(3);
-      }
-      if (struct.isSetGui()) {
-        optionals.set(4);
-      }
-      oprot.writeBitSet(optionals, 5);
-      if (struct.isSetCode()) {
-        oprot.writeString(struct.code);
-      }
-      if (struct.isSetType()) {
-        oprot.writeString(struct.type);
-      }
-      if (struct.isSetMsg()) {
-        oprot.writeString(struct.msg);
-      }
-      if (struct.isSetConfig()) {
-        oprot.writeString(struct.config);
-      }
-      if (struct.isSetGui()) {
-        oprot.writeString(struct.gui);
-      }
-    }
-
-    @Override
-    public void read(org.apache.thrift.protocol.TProtocol prot, RemoteInterpreterResult struct) throws org.apache.thrift.TException {
-      TTupleProtocol iprot = (TTupleProtocol) prot;
-      BitSet incoming = iprot.readBitSet(5);
-      if (incoming.get(0)) {
-        struct.code = iprot.readString();
-        struct.setCodeIsSet(true);
-      }
-      if (incoming.get(1)) {
-        struct.type = iprot.readString();
-        struct.setTypeIsSet(true);
-      }
-      if (incoming.get(2)) {
-        struct.msg = iprot.readString();
-        struct.setMsgIsSet(true);
-      }
-      if (incoming.get(3)) {
-        struct.config = iprot.readString();
-        struct.setConfigIsSet(true);
-      }
-      if (incoming.get(4)) {
-        struct.gui = iprot.readString();
-        struct.setGuiIsSet(true);
-      }
-    }
-  }
-
-}
-
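The RemoteInterpreterResult struct deleted above is plain Thrift-generated code (this commit re-adds it under the org.apache.zeppelin package). For orientation, here is a minimal sketch of how such a struct can be built and round-tripped with the compact protocol used by its writeObject()/readObject() helpers; the class name and field values are illustrative, and TSerializer/TDeserializer are standard libthrift utilities, not part of this change:

    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.protocol.TCompactProtocol;

    public class RemoteInterpreterResultExample {
      public static void main(String[] args) throws Exception {
        // build a result via the chained setters generated by Thrift
        RemoteInterpreterResult result = new RemoteInterpreterResult()
            .setCode("SUCCESS")
            .setType("TEXT")
            .setMsg("hello")
            .setConfig("{}")
            .setGui("{}");

        // serialize with the compact protocol, as writeObject() does above
        byte[] bytes = new TSerializer(new TCompactProtocol.Factory()).serialize(result);

        // read it back into a fresh instance and compare field by field
        RemoteInterpreterResult copy = new RemoteInterpreterResult();
        new TDeserializer(new TCompactProtocol.Factory()).deserialize(copy, bytes);
        System.out.println(copy.equals(result)); // true
      }
    }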


[14/17] incubator-zeppelin git commit: Rename package/groupId to org.apache and apply rat plugin.

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositoryListener.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositoryListener.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositoryListener.java
new file mode 100644
index 0000000..8a36f37
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositoryListener.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.sonatype.aether.AbstractRepositoryListener;
+import org.sonatype.aether.RepositoryEvent;
+
+/**
+ * Simple listener that prints log messages.
+ *
+ * @author anthonycorbacho
+ *
+ */
+public class RepositoryListener extends AbstractRepositoryListener {
+  Logger logger = LoggerFactory.getLogger(RepositoryListener.class);
+
+  public RepositoryListener() {}
+
+  @Override
+  public void artifactDeployed(RepositoryEvent event) {
+    logger.info("Deployed " + event.getArtifact() + " to " + event.getRepository());
+  }
+
+  @Override
+  public void artifactDeploying(RepositoryEvent event) {
+    logger.info("Deploying " + event.getArtifact() + " to " + event.getRepository());
+  }
+
+  @Override
+  public void artifactDescriptorInvalid(RepositoryEvent event) {
+    logger.info("Invalid artifact descriptor for " + event.getArtifact() + ": "
+                                                   + event.getException().getMessage());
+  }
+
+  @Override
+  public void artifactDescriptorMissing(RepositoryEvent event) {
+    logger.info("Missing artifact descriptor for " + event.getArtifact());
+  }
+
+  @Override
+  public void artifactInstalled(RepositoryEvent event) {
+    logger.info("Installed " + event.getArtifact() + " to " + event.getFile());
+  }
+
+  @Override
+  public void artifactInstalling(RepositoryEvent event) {
+    logger.info("Installing " + event.getArtifact() + " to " + event.getFile());
+  }
+
+  @Override
+  public void artifactResolved(RepositoryEvent event) {
+    logger.info("Resolved artifact " + event.getArtifact() + " from " + event.getRepository());
+  }
+
+  @Override
+  public void artifactDownloading(RepositoryEvent event) {
+    logger.info("Downloading artifact " + event.getArtifact() + " from " + event.getRepository());
+  }
+
+  @Override
+  public void artifactDownloaded(RepositoryEvent event) {
+    logger.info("Downloaded artifact " + event.getArtifact() + " from " + event.getRepository());
+  }
+
+  @Override
+  public void artifactResolving(RepositoryEvent event) {
+    logger.info("Resolving artifact " + event.getArtifact());
+  }
+
+  @Override
+  public void metadataDeployed(RepositoryEvent event) {
+    logger.info("Deployed " + event.getMetadata() + " to " + event.getRepository());
+  }
+
+  @Override
+  public void metadataDeploying(RepositoryEvent event) {
+    logger.info("Deploying " + event.getMetadata() + " to " + event.getRepository());
+  }
+
+  @Override
+  public void metadataInstalled(RepositoryEvent event) {
+    logger.info("Installed " + event.getMetadata() + " to " + event.getFile());
+  }
+
+  @Override
+  public void metadataInstalling(RepositoryEvent event) {
+    logger.info("Installing " + event.getMetadata() + " to " + event.getFile());
+  }
+
+  @Override
+  public void metadataInvalid(RepositoryEvent event) {
+    logger.info("Invalid metadata " + event.getMetadata());
+  }
+
+  @Override
+  public void metadataResolved(RepositoryEvent event) {
+    logger.info("Resolved metadata " + event.getMetadata() + " from " + event.getRepository());
+  }
+
+  @Override
+  public void metadataResolving(RepositoryEvent event) {
+    logger.info("Resolving metadata " + event.getMetadata() + " from " + event.getRepository());
+  }
+}
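RepositoryListener only logs; to see its output it has to be attached to an Aether repository session. A minimal sketch of that wiring, assuming the old Sonatype Aether API this module uses plus the MavenRepositorySystemSession helper from maven-aether-provider (the example class name is illustrative):

    import org.apache.maven.repository.internal.MavenRepositorySystemSession;

    public class RepositoryListenerExample {
      public static void main(String[] args) {
        // any resolution performed with this session reports its repository
        // events (resolving, resolved, downloaded, ...) through the logger above
        MavenRepositorySystemSession session = new MavenRepositorySystemSession();
        session.setRepositoryListener(new RepositoryListener());
      }
    }

The same session object also accepts a transfer listener (see the TransferListener class added below) for byte-level download progress.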

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositorySystemFactory.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositorySystemFactory.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositorySystemFactory.java
new file mode 100644
index 0000000..00fac7f
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/RepositorySystemFactory.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import org.apache.maven.repository.internal.DefaultServiceLocator;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.providers.http.HttpWagon;
+import org.apache.maven.wagon.providers.http.LightweightHttpWagon;
+import org.sonatype.aether.RepositorySystem;
+import org.sonatype.aether.connector.file.FileRepositoryConnectorFactory;
+import org.sonatype.aether.connector.wagon.WagonProvider;
+import org.sonatype.aether.connector.wagon.WagonRepositoryConnectorFactory;
+import org.sonatype.aether.spi.connector.RepositoryConnectorFactory;
+
+/**
+ * Get a Maven repository system instance.
+ *
+ * @author anthonycorbacho
+ *
+ */
+public class RepositorySystemFactory {
+  public static RepositorySystem newRepositorySystem() {
+    DefaultServiceLocator locator = new DefaultServiceLocator();
+    locator.addService(RepositoryConnectorFactory.class, FileRepositoryConnectorFactory.class);
+    locator.addService(RepositoryConnectorFactory.class, WagonRepositoryConnectorFactory.class);
+    locator.setServices(WagonProvider.class, new ManualWagonProvider());
+
+    return locator.getService(RepositorySystem.class);
+  }
+
+  /**
+   * ManualWagonProvider
+   */
+  public static class ManualWagonProvider implements WagonProvider {
+
+    @Override
+    public Wagon lookup(String roleHint) throws Exception {
+      if ("http".equals(roleHint)) {
+        return new LightweightHttpWagon();
+      }
+
+      if ("https".equals(roleHint)) {
+        return new HttpWagon();
+      }
+
+      return null;
+    }
+
+    @Override
+    public void release(Wagon arg0) {
+
+    }
+  }
+}
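A minimal sketch of the factory in use, resolving a single artifact through the RepositorySystem it returns; the coordinates match the commons-csv artifact used by the tests in this commit, while the class name, local repository path, and repository URL are illustrative assumptions:

    import org.apache.maven.repository.internal.MavenRepositorySystemSession;
    import org.sonatype.aether.RepositorySystem;
    import org.sonatype.aether.repository.LocalRepository;
    import org.sonatype.aether.repository.RemoteRepository;
    import org.sonatype.aether.resolution.ArtifactRequest;
    import org.sonatype.aether.resolution.ArtifactResult;
    import org.sonatype.aether.util.artifact.DefaultArtifact;

    public class ResolveExample {
      public static void main(String[] args) throws Exception {
        RepositorySystem system = RepositorySystemFactory.newRepositorySystem();

        // session backed by a local repository directory
        MavenRepositorySystemSession session = new MavenRepositorySystemSession();
        session.setLocalRepositoryManager(
            system.newLocalRepositoryManager(new LocalRepository("local-repo")));

        // request one artifact from a remote repository
        ArtifactRequest request = new ArtifactRequest();
        request.setArtifact(new DefaultArtifact("org.apache.commons:commons-csv:1.1"));
        request.addRepository(new RemoteRepository("central", "default",
            "http://repo1.maven.org/maven2/"));

        ArtifactResult result = system.resolveArtifact(session, request);
        System.out.println("Resolved to " + result.getArtifact().getFile());
      }
    }

The ManualWagonProvider above is what makes http and https repositories work: the wagon connector asks it for a Wagon per protocol, and any other protocol returns null.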

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/java/org/apache/zeppelin/spark/dep/TransferListener.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/dep/TransferListener.java b/spark/src/main/java/org/apache/zeppelin/spark/dep/TransferListener.java
new file mode 100644
index 0000000..30daec9
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/dep/TransferListener.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import java.io.PrintStream;
+import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.sonatype.aether.transfer.AbstractTransferListener;
+import org.sonatype.aether.transfer.TransferEvent;
+import org.sonatype.aether.transfer.TransferResource;
+
+/**
+ * Simple listener that shows dependency download progress.
+ *
+ * @author anthonycorbacho
+ *
+ */
+public class TransferListener extends AbstractTransferListener {
+  Logger logger = LoggerFactory.getLogger(TransferListener.class);
+  private PrintStream out = System.err;  // target stream for stack traces in transferFailed()/transferCorrupted()
+
+  private Map<TransferResource, Long> downloads = new ConcurrentHashMap<TransferResource, Long>();
+
+  private int lastLength;
+
+  public TransferListener() {}
+
+  @Override
+  public void transferInitiated(TransferEvent event) {
+    String message =
+        event.getRequestType() == TransferEvent.RequestType.PUT ? "Uploading" : "Downloading";
+
+    logger.info(message + ": " + event.getResource().getRepositoryUrl()
+                + event.getResource().getResourceName());
+  }
+
+  @Override
+  public void transferProgressed(TransferEvent event) {
+    TransferResource resource = event.getResource();
+    downloads.put(resource, Long.valueOf(event.getTransferredBytes()));
+
+    StringBuilder buffer = new StringBuilder(64);
+
+    for (Map.Entry<TransferResource, Long> entry : downloads.entrySet()) {
+      long total = entry.getKey().getContentLength();
+      long complete = entry.getValue().longValue();
+
+      buffer.append(getStatus(complete, total)).append("  ");
+    }
+
+    int pad = lastLength - buffer.length();
+    lastLength = buffer.length();
+    pad(buffer, pad);
+    buffer.append('\r');
+
+    logger.info(buffer.toString());
+  }
+
+  private String getStatus(long complete, long total) {
+    if (total >= 1024) {
+      return toKB(complete) + "/" + toKB(total) + " KB ";
+    } else if (total >= 0) {
+      return complete + "/" + total + " B ";
+    } else if (complete >= 1024) {
+      return toKB(complete) + " KB ";
+    } else {
+      return complete + " B ";
+    }
+  }
+
+  private void pad(StringBuilder buffer, int spaces) {
+    String block = "                                        ";
+    while (spaces > 0) {
+      int n = Math.min(spaces, block.length());
+      buffer.append(block, 0, n);
+      spaces -= n;
+    }
+  }
+
+  @Override
+  public void transferSucceeded(TransferEvent event) {
+    transferCompleted(event);
+
+    TransferResource resource = event.getResource();
+    long contentLength = event.getTransferredBytes();
+    if (contentLength >= 0) {
+      String type =
+          (event.getRequestType() == TransferEvent.RequestType.PUT ? "Uploaded" : "Downloaded");
+      String len = contentLength >= 1024 ? toKB(contentLength) + " KB" : contentLength + " B";
+
+      String throughput = "";
+      long duration = System.currentTimeMillis() - resource.getTransferStartTime();
+      if (duration > 0) {
+        DecimalFormat format = new DecimalFormat("0.0", new DecimalFormatSymbols(Locale.ENGLISH));
+        double kbPerSec = (contentLength / 1024.0) / (duration / 1000.0);
+        throughput = " at " + format.format(kbPerSec) + " KB/sec";
+      }
+
+      logger.info(type + ": " + resource.getRepositoryUrl() + resource.getResourceName() + " ("
+          + len + throughput + ")");
+    }
+  }
+
+  @Override
+  public void transferFailed(TransferEvent event) {
+    transferCompleted(event);
+    event.getException().printStackTrace(out);
+  }
+
+  private void transferCompleted(TransferEvent event) {
+    downloads.remove(event.getResource());
+    StringBuilder buffer = new StringBuilder(64);
+    pad(buffer, lastLength);
+    buffer.append('\r');
+    logger.info(buffer.toString());
+  }
+
+  @Override
+  public void transferCorrupted(TransferEvent event) {
+    event.getException().printStackTrace(out);
+  }
+
+  protected long toKB(long bytes) {
+    return (bytes + 1023) / 1024;
+  }
+
+}
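Two details of the progress reporting above are easy to misread: toKB() rounds up to whole kilobytes, and transferSucceeded() derives throughput from the transferred byte count and wall-clock duration. A short sketch with illustrative numbers (the class name is hypothetical):

    public class TransferMathExample {
      public static void main(String[] args) {
        // toKB(bytes) == (bytes + 1023) / 1024, i.e. rounded up to whole KB
        System.out.println((1L + 1023) / 1024);    // 1
        System.out.println((1024L + 1023) / 1024); // 1
        System.out.println((1025L + 1023) / 1024); // 2

        // throughput as computed in transferSucceeded()
        long contentLength = 524288; // 512 KB transferred
        long duration = 2000;        // in 2 seconds
        double kbPerSec = (contentLength / 1024.0) / (duration / 1000.0);
        System.out.println(kbPerSec + " KB/sec"); // 256.0 KB/sec
      }
    }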

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/resources/python/zeppelin_pyspark.py
----------------------------------------------------------------------
diff --git a/spark/src/main/resources/python/zeppelin_pyspark.py b/spark/src/main/resources/python/zeppelin_pyspark.py
index 92baf58..5b70d85 100644
--- a/spark/src/main/resources/python/zeppelin_pyspark.py
+++ b/spark/src/main/resources/python/zeppelin_pyspark.py
@@ -1,3 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 import sys, getopt
 
 from py4j.java_gateway import java_import, JavaGateway, GatewayClient

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkILoop.scala
----------------------------------------------------------------------
diff --git a/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkILoop.scala b/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkILoop.scala
deleted file mode 100644
index 8c63c10..0000000
--- a/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkILoop.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-package com.nflabs.zeppelin.spark
-
-import scala.tools.nsc._
-import scala.tools.nsc.interpreter._
-import org.apache.spark.repl.SparkILoop
-import org.apache.spark.repl.SparkIMain
-import org.apache.spark.util.Utils
-import java.io.BufferedReader
-import scala.tools.nsc.util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
-
-
-class ReflectSparkILoop(in0: Option[BufferedReader], override protected val out: JPrintWriter, override val master: Option[String])
-	  extends SparkILoop(in0, out, master) {
-  def this(in0: BufferedReader, out: JPrintWriter, master: String) = this(Some(in0), out, Some(master))
-  def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out, None)
-  def this() = this(None, new JPrintWriter(Console.out, true), None)  
-  
-
-  class ReflectSparkILoopInterpreter extends ReflectSparkIMain(settings, out) {
-    outer =>
-
-    override lazy val formatting = new Formatting {
-      def prompt = ReflectSparkILoop.this.prompt
-    }
-    override protected def parentClassLoader = SparkHelper.explicitParentLoader(settings).getOrElse(classOf[SparkILoop].getClassLoader)
-  }
-
-  /** Create a new interpreter. */
-  override def createInterpreter() {
-    require(settings != null)
-
-    if (addedClasspath != "") settings.classpath.append(addedClasspath)
-    // work around for Scala bug
-    val totalClassPath = SparkILoop.getAddedJars.foldLeft(
-      settings.classpath.value)((l, r) => ClassPath.join(l, r))
-    this.settings.classpath.value = totalClassPath
-
-    intp = new ReflectSparkILoopInterpreter
-  }
-  
-  /** Create a new interpreter. */
-  def createReflectInterpreter(settings : Settings) : SparkIMain = {
-    require(settings != null)
-
-    if (addedClasspath != "") settings.classpath.append(addedClasspath)
-    // work around for Scala bug
-    val totalClassPath = SparkILoop.getAddedJars.foldLeft(
-      settings.classpath.value)((l, r) => ClassPath.join(l, r))
-    this.settings.classpath.value = totalClassPath
-
-    intp = new ReflectSparkILoopInterpreter
-    intp
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkIMain.scala
----------------------------------------------------------------------
diff --git a/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkIMain.scala b/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkIMain.scala
deleted file mode 100644
index 0ad3696..0000000
--- a/spark/src/main/scala/com/nflabs/zeppelin/spark/ReflectSparkIMain.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package com.nflabs.zeppelin.spark
-
-import scala.tools.nsc._
-import scala.tools.nsc.interpreter._
-import reporters._
-import org.apache.spark.repl.SparkIMain
-import scala.tools.reflect._
-class ReflectSparkIMain(initialSettings: Settings, override val out: JPrintWriter) extends SparkIMain(initialSettings, out) {
-	
-  override def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
-    settings.outputDirs setSingleOutput virtualDirectory
-    settings.exposeEmptyPackage.value = true
-    new ReflectGlobal(settings, reporter, classLoader) with ReplGlobal {
-      override def toString: String = "<global>"
-    }
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/main/scala/com/nflabs/zeppelin/spark/SparkRepl.scala
----------------------------------------------------------------------
diff --git a/spark/src/main/scala/com/nflabs/zeppelin/spark/SparkRepl.scala b/spark/src/main/scala/com/nflabs/zeppelin/spark/SparkRepl.scala
deleted file mode 100644
index 9f1d3b4..0000000
--- a/spark/src/main/scala/com/nflabs/zeppelin/spark/SparkRepl.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-package com.nflabs.zeppelin.spark
-
-import com.nflabs.zeppelin.repl.Repl
-import com.nflabs.zeppelin.repl.ReplResult
-import com.nflabs.zeppelin.repl.ReplResult.Code
-import com.nflabs.zeppelin.repl.Repl.FormType
-import java.util.Properties
-import scala.tools.nsc.{Interpreter, Settings}
-import java.io.ByteArrayOutputStream
-import java.io.PrintStream
-import java.io.PrintWriter
-import org.apache.spark.repl.SparkILoop
-import org.apache.spark.repl.SparkIMain;
-import org.apache.spark.SparkContext
-
-class SparkRepl(properties: Properties) extends Repl(properties) {
-  
-  val out = new ByteArrayOutputStream(); 
-  var interpreter : ReflectSparkILoop = _
-  var intp : SparkIMain = _
-  
-  protected def getSparkContext() : SparkContext = {
-    null
-  }
-  
-  override def initialize() = {
-    val cl = Thread.currentThread().getContextClassLoader();
-    
-    val settings = new Settings();
-    settings.usejavacp.value = true
-
-    val printStream = new PrintStream(out)
-    interpreter = new ReflectSparkILoop(null, new PrintWriter(out))
-    interpreter.settings = settings;
-    intp = interpreter.createReflectInterpreter(settings);
-    interpreter.intp = intp
-    intp.initializeSynchronous
-    
-    
-  }
-  override def destroy() = {
-	intp.close()	
-  }
-  override def getValue(name : String) : Object = {
-
-    return null;
-  }
-  override def interpret(st : String) : ReplResult = {
-    return null;
-  }
-	
-  override def cancel() = {
-	  
-  }
-  override def bindValue(name : String, o : Object) = {
-	  
-  }
-  override def getFormType() : FormType = {
-    return FormType.NATIVE;
-  }
-  
-  def getResultCode(r : scala.tools.nsc.interpreter.Results.Result) : Code = {
-
-    if (r.isInstanceOf[scala.tools.nsc.interpreter.Results.Success$]) {
-      return Code.SUCCESS;
-    } else if (r.isInstanceOf[scala.tools.nsc.interpreter.Results.Incomplete$]) {
-      return Code.INCOMPLETE;
-    } else {
-      return Code.ERROR;
-    }
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/com/nflabs/zeppelin/spark/DepInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/com/nflabs/zeppelin/spark/DepInterpreterTest.java b/spark/src/test/java/com/nflabs/zeppelin/spark/DepInterpreterTest.java
deleted file mode 100644
index 7fe8aae..0000000
--- a/spark/src/test/java/com/nflabs/zeppelin/spark/DepInterpreterTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Properties;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-
-public class DepInterpreterTest {
-  private DepInterpreter dep;
-  private InterpreterContext context;
-  private File tmpDir;
-  private SparkInterpreter repl;
-
-  @Before
-  public void setUp() throws Exception {
-    tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis());
-    System.setProperty("zeppelin.dep.localrepo", tmpDir.getAbsolutePath() + "/local-repo");
-
-    tmpDir.mkdirs();
-
-    Properties p = new Properties();
-
-    dep = new DepInterpreter(p);
-    dep.open();
-
-    InterpreterGroup intpGroup = new InterpreterGroup();
-    intpGroup.add(new SparkInterpreter(p));
-    intpGroup.add(dep);
-    dep.setInterpreterGroup(intpGroup);
-
-    context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    dep.close();
-    delete(tmpDir);
-  }
-
-  private void delete(File file) {
-    if (file.isFile()) file.delete();
-    else if (file.isDirectory()) {
-      File[] files = file.listFiles();
-      if (files != null && files.length > 0) {
-        for (File f : files) {
-          delete(f);
-        }
-      }
-      file.delete();
-    }
-  }
-
-  @Test
-  public void testDefault() {
-    dep.getDependencyContext().reset();
-    InterpreterResult ret = dep.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
-    assertEquals(Code.SUCCESS, ret.code());
-
-    assertEquals(1, dep.getDependencyContext().getFiles().size());
-    assertEquals(1, dep.getDependencyContext().getFilesDist().size());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/com/nflabs/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/com/nflabs/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/com/nflabs/zeppelin/spark/SparkInterpreterTest.java
deleted file mode 100644
index ae9fb73..0000000
--- a/spark/src/test/java/com/nflabs/zeppelin/spark/SparkInterpreterTest.java
+++ /dev/null
@@ -1,118 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Properties;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
-
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class SparkInterpreterTest {
-  public static SparkInterpreter repl;
-  private InterpreterContext context;
-  private File tmpDir;
-
-  @Before
-  public void setUp() throws Exception {
-    tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis());
-    System.setProperty("zeppelin.dep.localrepo", tmpDir.getAbsolutePath() + "/local-repo");
-
-    tmpDir.mkdirs();
-
-	  if (repl == null) {
-		  Properties p = new Properties();
-
-	    repl = new SparkInterpreter(p);
-  	  repl.open();
-	  }
-
-    context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
-	}
-
-  @After
-  public void tearDown() throws Exception {
-    delete(tmpDir);
-  }
-
-  private void delete(File file) {
-    if (file.isFile()) file.delete();
-    else if (file.isDirectory()) {
-      File[] files = file.listFiles();
-      if (files != null && files.length > 0) {
-        for (File f : files) {
-          delete(f);
-        }
-      }
-      file.delete();
-    }
-  }
-
-	@Test
-	public void testBasicIntp() {
-		assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("val a = 1\nval b = 2", context).code());
-
-		// when interpret incomplete expression
-		InterpreterResult incomplete = repl.interpret("val a = \"\"\"", context);
-		assertEquals(InterpreterResult.Code.INCOMPLETE, incomplete.code());
-		assertTrue(incomplete.message().length()>0); // expecting some error message
-		/*
-		assertEquals(1, repl.getValue("a"));
-		assertEquals(2, repl.getValue("b"));
-		repl.interpret("val ver = sc.version");
-		assertNotNull(repl.getValue("ver"));
-		assertEquals("HELLO\n", repl.interpret("println(\"HELLO\")").message());
-		*/
-	}
-
-	@Test
-	public void testEndWithComment() {
-		assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("val c=1\n//comment", context).code());
-	}
-
-	@Test
-	public void testSparkSql(){
-		repl.interpret("case class Person(name:String, age:Int)\n", context);
-		repl.interpret("val people = sc.parallelize(Seq(Person(\"moon\", 33), Person(\"jobs\", 51), Person(\"gates\", 51), Person(\"park\", 34)))\n", context);
-		assertEquals(Code.SUCCESS, repl.interpret("people.take(3)", context).code());
-
-		// create new interpreter
-		Properties p = new Properties();
-		SparkInterpreter repl2 = new SparkInterpreter(p);
-		repl2.open();
-
-		repl.interpret("case class Man(name:String, age:Int)", context);
-		repl.interpret("val man = sc.parallelize(Seq(Man(\"moon\", 33), Man(\"jobs\", 51), Man(\"gates\", 51), Man(\"park\", 34)))", context);
-		assertEquals(Code.SUCCESS, repl.interpret("man.take(3)", context).code());
-		repl2.getSparkContext().stop();
-	}
-
-	@Test
-	public void testReferencingUndefinedVal(){
-		InterpreterResult result = repl.interpret("def category(min: Int) = {" +
-				       "    if (0 <= value) \"error\"" +
-                       "}", context);
-		assertEquals(Code.ERROR, result.code());
-	}
-
-  @Test
-  public void testZContextDependencyLoading() {
-    // try to import library does not exist on classpath. it'll fail
-    assertEquals(InterpreterResult.Code.ERROR, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
-
-    // load library from maven repository and try to import again
-    repl.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
-    assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/com/nflabs/zeppelin/spark/SparkSqlInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/com/nflabs/zeppelin/spark/SparkSqlInterpreterTest.java b/spark/src/test/java/com/nflabs/zeppelin/spark/SparkSqlInterpreterTest.java
deleted file mode 100644
index 9eba227..0000000
--- a/spark/src/test/java/com/nflabs/zeppelin/spark/SparkSqlInterpreterTest.java
+++ /dev/null
@@ -1,94 +0,0 @@
-package com.nflabs.zeppelin.spark;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.HashMap;
-import java.util.Properties;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.nflabs.zeppelin.display.GUI;
-import com.nflabs.zeppelin.interpreter.InterpreterContext;
-import com.nflabs.zeppelin.interpreter.InterpreterGroup;
-import com.nflabs.zeppelin.interpreter.InterpreterResult;
-import com.nflabs.zeppelin.interpreter.InterpreterResult.Type;
-
-public class SparkSqlInterpreterTest {
-
-	private SparkSqlInterpreter sql;
-  private SparkInterpreter repl;
-  private InterpreterContext context;
-
-	@Before
-	public void setUp() throws Exception {
-		Properties p = new Properties();
-
-		if (repl == null) {
-
-		  if (SparkInterpreterTest.repl == null) {
-		    repl = new SparkInterpreter(p);
-		    repl.open();
-		    SparkInterpreterTest.repl = repl;
-		  } else {
-		    repl = SparkInterpreterTest.repl;
-		  }
-
-  		sql = new SparkSqlInterpreter(p);
-
-  		InterpreterGroup intpGroup = new InterpreterGroup();
-		  intpGroup.add(repl);
-		  intpGroup.add(sql);
-		  sql.setInterpreterGroup(intpGroup);
-		  sql.open();
-		}
-		context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
-	}
-
-	@After
-	public void tearDown() throws Exception {
-	}
-
-	@Test
-	public void test() {
-		repl.interpret("case class Test(name:String, age:Int)", context);
-		repl.interpret("val test = sc.parallelize(Seq(Test(\"moon\", 33), Test(\"jobs\", 51), Test(\"gates\", 51), Test(\"park\", 34)))", context);
-		repl.interpret("test.registerAsTable(\"test\")", context);
-
-		InterpreterResult ret = sql.interpret("select name, age from test where age < 40", context);
-		assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
-		assertEquals(Type.TABLE, ret.type());
-		assertEquals("name\tage\nmoon\t33\npark\t34\n", ret.message());
-
-		assertEquals(InterpreterResult.Code.ERROR, sql.interpret("select wrong syntax", context).code());
-		assertEquals(InterpreterResult.Code.SUCCESS, sql.interpret("select case when name==\"aa\" then name else name end from people", context).code());
-	}
-
-	@Test
-	public void testStruct(){
-		repl.interpret("case class Person(name:String, age:Int)", context);
-		repl.interpret("case class People(group:String, person:Person)", context);
-		repl.interpret("val gr = sc.parallelize(Seq(People(\"g1\", Person(\"moon\",33)), People(\"g2\", Person(\"sun\",11))))", context);
-		repl.interpret("gr.registerAsTable(\"gr\")", context);
-		InterpreterResult ret = sql.interpret("select * from gr", context);
-		assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
-	}
-
-	@Test
-	public void test_null_value_in_row() {
-		repl.interpret("import org.apache.spark.sql._", context);
-		repl.interpret("def toInt(s:String): Any = {try { s.trim().toInt} catch {case e:Exception => null}}", context);
-		repl.interpret("val schema = StructType(Seq(StructField(\"name\", StringType, false),StructField(\"age\" , IntegerType, true),StructField(\"other\" , StringType, false)))", context);
-		repl.interpret("val csv = sc.parallelize(Seq((\"jobs, 51, apple\"), (\"gates, , microsoft\")))", context);
-		repl.interpret("val raw = csv.map(_.split(\",\")).map(p => Row(p(0),toInt(p(1)),p(2)))", context);
-		repl.interpret("val people = z.sqlContext.applySchema(raw, schema)", context);
-		repl.interpret("people.registerTempTable(\"people\")", context);
-
-		InterpreterResult ret = sql.interpret("select name, age from people where name = 'gates'", context);
-		System.err.println("RET=" + ret.message());
-		assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
-		assertEquals(Type.TABLE, ret.type());
-		assertEquals("name\tage\ngates\tnull\n", ret.message());
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/com/nflabs/zeppelin/spark/dep/DependencyResolverTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/com/nflabs/zeppelin/spark/dep/DependencyResolverTest.java b/spark/src/test/java/com/nflabs/zeppelin/spark/dep/DependencyResolverTest.java
deleted file mode 100644
index 804c31e..0000000
--- a/spark/src/test/java/com/nflabs/zeppelin/spark/dep/DependencyResolverTest.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.nflabs.zeppelin.spark.dep;
-
-import static org.junit.Assert.assertEquals;
-
-import org.junit.Test;
-
-public class DependencyResolverTest {
-
-  @Test
-  public void testInferScalaVersion() {
-    String [] version = scala.util.Properties.versionNumberString().split("[.]");
-    String scalaVersion = version[0] + "." + version[1];
-
-    assertEquals("groupId:artifactId:version",
-        DependencyResolver.inferScalaVersion("groupId:artifactId:version"));
-    assertEquals("groupId:artifactId_" + scalaVersion + ":version",
-        DependencyResolver.inferScalaVersion("groupId::artifactId:version"));
-    assertEquals("groupId:artifactId:version::test",
-        DependencyResolver.inferScalaVersion("groupId:artifactId:version::test"));
-    assertEquals("*",
-        DependencyResolver.inferScalaVersion("*"));
-    assertEquals("groupId:*",
-        DependencyResolver.inferScalaVersion("groupId:*"));
-    assertEquals("groupId:artifactId*",
-        DependencyResolver.inferScalaVersion("groupId:artifactId*"));
-    assertEquals("groupId:artifactId_" + scalaVersion,
-        DependencyResolver.inferScalaVersion("groupId::artifactId"));
-    assertEquals("groupId:artifactId_" + scalaVersion + "*",
-        DependencyResolver.inferScalaVersion("groupId::artifactId*"));
-    assertEquals("groupId:artifactId_" + scalaVersion + ":*",
-        DependencyResolver.inferScalaVersion("groupId::artifactId:*"));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/org/apache/zeppelin/spark/DepInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/DepInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/DepInterpreterTest.java
new file mode 100644
index 0000000..8d24cc4
--- /dev/null
+++ b/spark/src/test/java/org/apache/zeppelin/spark/DepInterpreterTest.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.spark.DepInterpreter;
+import org.apache.zeppelin.spark.SparkInterpreter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class DepInterpreterTest {
+  private DepInterpreter dep;
+  private InterpreterContext context;
+  private File tmpDir;
+  private SparkInterpreter repl;
+
+  @Before
+  public void setUp() throws Exception {
+    tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis());
+    System.setProperty("zeppelin.dep.localrepo", tmpDir.getAbsolutePath() + "/local-repo");
+
+    tmpDir.mkdirs();
+
+    Properties p = new Properties();
+
+    dep = new DepInterpreter(p);
+    dep.open();
+
+    InterpreterGroup intpGroup = new InterpreterGroup();
+    intpGroup.add(new SparkInterpreter(p));
+    intpGroup.add(dep);
+    dep.setInterpreterGroup(intpGroup);
+
+    context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    dep.close();
+    delete(tmpDir);
+  }
+
+  private void delete(File file) {
+    if (file.isFile()) file.delete();
+    else if (file.isDirectory()) {
+      File[] files = file.listFiles();
+      if (files != null && files.length > 0) {
+        for (File f : files) {
+          delete(f);
+        }
+      }
+      file.delete();
+    }
+  }
+
+  @Test
+  public void testDefault() {
+    dep.getDependencyContext().reset();
+    InterpreterResult ret = dep.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
+    assertEquals(Code.SUCCESS, ret.code());
+
+    assertEquals(1, dep.getDependencyContext().getFiles().size());
+    assertEquals(1, dep.getDependencyContext().getFilesDist().size());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
new file mode 100644
index 0000000..20f7fa4
--- /dev/null
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.spark.SparkInterpreter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class SparkInterpreterTest {
+  public static SparkInterpreter repl;
+  private InterpreterContext context;
+  private File tmpDir;
+
+  @Before
+  public void setUp() throws Exception {
+    tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis());
+    System.setProperty("zeppelin.dep.localrepo", tmpDir.getAbsolutePath() + "/local-repo");
+
+    tmpDir.mkdirs();
+
+    if (repl == null) {
+      Properties p = new Properties();
+
+      repl = new SparkInterpreter(p);
+      repl.open();
+    }
+
+    context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    delete(tmpDir);
+  }
+
+  private void delete(File file) {
+    if (file.isFile()) file.delete();
+    else if (file.isDirectory()) {
+      File[] files = file.listFiles();
+      if (files != null && files.length > 0) {
+        for (File f : files) {
+          delete(f);
+        }
+      }
+      file.delete();
+    }
+  }
+
+  @Test
+  public void testBasicIntp() {
+    assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("val a = 1\nval b = 2", context).code());
+
+    // when interpreting an incomplete expression
+    InterpreterResult incomplete = repl.interpret("val a = \"\"\"", context);
+    assertEquals(InterpreterResult.Code.INCOMPLETE, incomplete.code());
+    assertTrue(incomplete.message().length() > 0); // expecting some error message
+    /*
+    assertEquals(1, repl.getValue("a"));
+    assertEquals(2, repl.getValue("b"));
+    repl.interpret("val ver = sc.version");
+    assertNotNull(repl.getValue("ver"));
+    assertEquals("HELLO\n", repl.interpret("println(\"HELLO\")").message());
+    */
+  }
+
+  @Test
+  public void testEndWithComment() {
+    assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("val c=1\n//comment", context).code());
+  }
+
+  @Test
+  public void testSparkSql() {
+    repl.interpret("case class Person(name:String, age:Int)\n", context);
+    repl.interpret("val people = sc.parallelize(Seq(Person(\"moon\", 33), Person(\"jobs\", 51), Person(\"gates\", 51), Person(\"park\", 34)))\n", context);
+    assertEquals(Code.SUCCESS, repl.interpret("people.take(3)", context).code());
+
+    // create a new interpreter
+    Properties p = new Properties();
+    SparkInterpreter repl2 = new SparkInterpreter(p);
+    repl2.open();
+
+    repl.interpret("case class Man(name:String, age:Int)", context);
+    repl.interpret("val man = sc.parallelize(Seq(Man(\"moon\", 33), Man(\"jobs\", 51), Man(\"gates\", 51), Man(\"park\", 34)))", context);
+    assertEquals(Code.SUCCESS, repl.interpret("man.take(3)", context).code());
+    repl2.getSparkContext().stop();
+  }
+
+  @Test
+  public void testReferencingUndefinedVal() {
+    InterpreterResult result = repl.interpret("def category(min: Int) = {" +
+        "    if (0 <= value) \"error\"" +
+        "}", context);
+    assertEquals(Code.ERROR, result.code());
+  }
+
+  @Test
+  public void testZContextDependencyLoading() {
+    // try to import a library that does not exist on the classpath; it will fail
+    assertEquals(InterpreterResult.Code.ERROR, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
+
+    // load library from maven repository and try to import again
+    repl.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
+  }
+}
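
One thing worth noting from testBasicIntp above: the INCOMPLETE result code is the signal a caller can use to keep buffering input before evaluating it. A rough sketch of that loop, assuming only the interpret(String, InterpreterContext) API used in the test; the helper class and method names are made up for illustration and are not part of this commit.

    import java.util.List;

    import org.apache.zeppelin.interpreter.InterpreterContext;
    import org.apache.zeppelin.interpreter.InterpreterResult;
    import org.apache.zeppelin.spark.SparkInterpreter;

    public class IncompleteInputSketch {
      // Append lines to a buffer until the interpreter stops reporting INCOMPLETE,
      // then start a fresh buffer for the next statement.
      static InterpreterResult interpretLines(SparkInterpreter repl, List<String> lines,
          InterpreterContext ctx) {
        StringBuilder buffer = new StringBuilder();
        InterpreterResult result = null;
        for (String line : lines) {
          buffer.append(line).append("\n");
          result = repl.interpret(buffer.toString(), ctx);
          if (result.code() != InterpreterResult.Code.INCOMPLETE) {
            buffer.setLength(0); // a complete statement was evaluated
          }
        }
        return result; // result of the last evaluation (null if lines was empty)
      }
    }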

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
new file mode 100644
index 0000000..71f088d
--- /dev/null
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Type;
+import org.apache.zeppelin.spark.SparkInterpreter;
+import org.apache.zeppelin.spark.SparkSqlInterpreter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class SparkSqlInterpreterTest {
+
+  private SparkSqlInterpreter sql;
+  private SparkInterpreter repl;
+  private InterpreterContext context;
+
+  @Before
+  public void setUp() throws Exception {
+    Properties p = new Properties();
+
+    if (repl == null) {
+
+      if (SparkInterpreterTest.repl == null) {
+        repl = new SparkInterpreter(p);
+        repl.open();
+        SparkInterpreterTest.repl = repl;
+      } else {
+        repl = SparkInterpreterTest.repl;
+      }
+
+      sql = new SparkSqlInterpreter(p);
+
+      InterpreterGroup intpGroup = new InterpreterGroup();
+      intpGroup.add(repl);
+      intpGroup.add(sql);
+      sql.setInterpreterGroup(intpGroup);
+      sql.open();
+    }
+    context = new InterpreterContext("id", "title", "text", new HashMap<String, Object>(), new GUI());
+  }
+
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  @Test
+  public void test() {
+    repl.interpret("case class Test(name:String, age:Int)", context);
+    repl.interpret("val test = sc.parallelize(Seq(Test(\"moon\", 33), Test(\"jobs\", 51), Test(\"gates\", 51), Test(\"park\", 34)))", context);
+    repl.interpret("test.registerAsTable(\"test\")", context);
+
+    InterpreterResult ret = sql.interpret("select name, age from test where age < 40", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
+    assertEquals(Type.TABLE, ret.type());
+    assertEquals("name\tage\nmoon\t33\npark\t34\n", ret.message());
+
+    assertEquals(InterpreterResult.Code.ERROR, sql.interpret("select wrong syntax", context).code());
+    assertEquals(InterpreterResult.Code.SUCCESS, sql.interpret("select case when name==\"aa\" then name else name end from people", context).code());
+  }
+
+  @Test
+  public void testStruct() {
+    repl.interpret("case class Person(name:String, age:Int)", context);
+    repl.interpret("case class People(group:String, person:Person)", context);
+    repl.interpret("val gr = sc.parallelize(Seq(People(\"g1\", Person(\"moon\",33)), People(\"g2\", Person(\"sun\",11))))", context);
+    repl.interpret("gr.registerAsTable(\"gr\")", context);
+    InterpreterResult ret = sql.interpret("select * from gr", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
+  }
+
+  @Test
+  public void test_null_value_in_row() {
+    repl.interpret("import org.apache.spark.sql._", context);
+    repl.interpret("def toInt(s:String): Any = {try { s.trim().toInt} catch {case e:Exception => null}}", context);
+    repl.interpret("val schema = StructType(Seq(StructField(\"name\", StringType, false),StructField(\"age\" , IntegerType, true),StructField(\"other\" , StringType, false)))", context);
+    repl.interpret("val csv = sc.parallelize(Seq((\"jobs, 51, apple\"), (\"gates, , microsoft\")))", context);
+    repl.interpret("val raw = csv.map(_.split(\",\")).map(p => Row(p(0),toInt(p(1)),p(2)))", context);
+    repl.interpret("val people = z.sqlContext.applySchema(raw, schema)", context);
+    repl.interpret("people.registerTempTable(\"people\")", context);
+
+    InterpreterResult ret = sql.interpret("select name, age from people where name = 'gates'", context);
+    System.err.println("RET=" + ret.message());
+    assertEquals(InterpreterResult.Code.SUCCESS, ret.code());
+    assertEquals(Type.TABLE, ret.type());
+    assertEquals("name\tage\ngates\tnull\n", ret.message());
+  }
+}
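
The message assertions above rely on the Type.TABLE convention: a tab-separated header line followed by tab-separated data lines, terminated by a newline. A small sketch of reading such a message back into rows; the class and method names here are illustrative and not part of Zeppelin.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class TableMessageSketch {
      // Split a TABLE message such as "name\tage\nmoon\t33\npark\t34\n" into rows;
      // the first row is the header.
      static List<String[]> parseTable(String message) {
        List<String[]> rows = new ArrayList<String[]>();
        for (String line : message.split("\n")) {
          if (line.isEmpty()) {
            continue; // skip blank lines
          }
          rows.add(line.split("\t", -1)); // -1 keeps trailing empty cells
        }
        return rows;
      }

      public static void main(String[] args) {
        List<String[]> rows = parseTable("name\tage\nmoon\t33\npark\t34\n");
        System.out.println(Arrays.toString(rows.get(0))); // prints [name, age]
        System.out.println((rows.size() - 1) + " data rows"); // prints 2 data rows
      }
    }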

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/spark/src/test/java/org/apache/zeppelin/spark/dep/DependencyResolverTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/dep/DependencyResolverTest.java b/spark/src/test/java/org/apache/zeppelin/spark/dep/DependencyResolverTest.java
new file mode 100644
index 0000000..e41de60
--- /dev/null
+++ b/spark/src/test/java/org/apache/zeppelin/spark/dep/DependencyResolverTest.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark.dep;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.zeppelin.spark.dep.DependencyResolver;
+import org.junit.Test;
+
+public class DependencyResolverTest {
+
+  @Test
+  public void testInferScalaVersion() {
+    String [] version = scala.util.Properties.versionNumberString().split("[.]");
+    String scalaVersion = version[0] + "." + version[1];
+
+    assertEquals("groupId:artifactId:version",
+        DependencyResolver.inferScalaVersion("groupId:artifactId:version"));
+    assertEquals("groupId:artifactId_" + scalaVersion + ":version",
+        DependencyResolver.inferScalaVersion("groupId::artifactId:version"));
+    assertEquals("groupId:artifactId:version::test",
+        DependencyResolver.inferScalaVersion("groupId:artifactId:version::test"));
+    assertEquals("*",
+        DependencyResolver.inferScalaVersion("*"));
+    assertEquals("groupId:*",
+        DependencyResolver.inferScalaVersion("groupId:*"));
+    assertEquals("groupId:artifactId*",
+        DependencyResolver.inferScalaVersion("groupId:artifactId*"));
+    assertEquals("groupId:artifactId_" + scalaVersion,
+        DependencyResolver.inferScalaVersion("groupId::artifactId"));
+    assertEquals("groupId:artifactId_" + scalaVersion + "*",
+        DependencyResolver.inferScalaVersion("groupId::artifactId*"));
+    assertEquals("groupId:artifactId_" + scalaVersion + ":*",
+        DependencyResolver.inferScalaVersion("groupId::artifactId:*"));
+  }
+
+}
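
As the assertions spell out, a double colon in a coordinate tells the resolver to append _<scalaVersion> (major.minor of the running Scala) to the artifactId, while ordinary coordinates pass through unchanged. A quick illustrative sketch; the class name and the Scala library coordinate are examples only, and the printed suffix depends on the Scala version on the classpath.

    import org.apache.zeppelin.spark.dep.DependencyResolver;

    public class InferScalaVersionSketch {
      public static void main(String[] args) {
        // With Scala 2.10.x this prints "org.scalanlp:breeze_2.10:0.10".
        System.out.println(DependencyResolver.inferScalaVersion("org.scalanlp::breeze:0.10"));

        // Plain group:artifact:version coordinates are returned unchanged.
        System.out.println(DependencyResolver.inferScalaVersion("org.apache.commons:commons-csv:1.1"));
      }
    }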

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/testing/startSparkCluster.sh
----------------------------------------------------------------------
diff --git a/testing/startSparkCluster.sh b/testing/startSparkCluster.sh
index 60e8efb..896ed52 100755
--- a/testing/startSparkCluster.sh
+++ b/testing/startSparkCluster.sh
@@ -1,4 +1,21 @@
 #!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 wget http://apache.mesi.com.ar/spark/spark-1.1.1/spark-1.1.1-bin-hadoop2.3.tgz
 tar zxvf spark-1.1.1-bin-hadoop2.3.tgz
 cd spark-1.1.1-bin-hadoop2.3

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/testing/stopSparkCluster.sh
----------------------------------------------------------------------
diff --git a/testing/stopSparkCluster.sh b/testing/stopSparkCluster.sh
index 36b95f4..4f8d7a9 100755
--- a/testing/stopSparkCluster.sh
+++ b/testing/stopSparkCluster.sh
@@ -1,4 +1,21 @@
-#!/bin/sh
+#!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 cd spark-1.1.1-bin-hadoop2.3
 ./sbin/stop-master.sh
 kill $(ps -ef | grep 'org.apache.spark.deploy.worker.Worker' | awk '{print $2}')

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/README.md
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/README.md b/zeppelin-distribution/README.md
index dabb7ce..72fd630 100644
--- a/zeppelin-distribution/README.md
+++ b/zeppelin-distribution/README.md
@@ -1,3 +1,20 @@
+<!---
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
 # Distribution archive of Zeppelin project #
 
 Zeppelin is distributed as a single gzip archive with the following structure:

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/build-infrastructure.md
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/build-infrastructure.md b/zeppelin-distribution/build-infrastructure.md
index 582725d..b8b3e20 100644
--- a/zeppelin-distribution/build-infrastructure.md
+++ b/zeppelin-distribution/build-infrastructure.md
@@ -1,3 +1,20 @@
+<!---
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
 Zeppelin dependency graph:
 --------------
                      hive, hadoop, ...

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/pom.xml b/zeppelin-distribution/pom.xml
index 1a4da42..efe83ab 100644
--- a/zeppelin-distribution/pom.xml
+++ b/zeppelin-distribution/pom.xml
@@ -1,11 +1,28 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
     <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
+    <groupId>org.apache.zeppelin</groupId>
     <version>0.5.0-SNAPSHOT</version>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/src/assemble/distribution.xml
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/src/assemble/distribution.xml b/zeppelin-distribution/src/assemble/distribution.xml
index 2edee49..dae34c4 100644
--- a/zeppelin-distribution/src/assemble/distribution.xml
+++ b/zeppelin-distribution/src/assemble/distribution.xml
@@ -1,3 +1,20 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
 <assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
     xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
@@ -16,9 +33,9 @@
       <useAllReactorProjects>true</useAllReactorProjects> -->
       <!-- Now, select which projects to include in this module-set. -->
       <includes>
-        <include>com.nflabs.zeppelin:zeppelin-api-ui</include>
-        <include>com.nflabs.zeppelin:zeppelin-server</include>
-        <include>com.nflabs.zeppelin:zeppelin-web</include>
+        <include>org.apache.zeppelin:zeppelin-api-ui</include>
+        <include>org.apache.zeppelin:zeppelin-server</include>
+        <include>org.apache.zeppelin:zeppelin-web</include>
       </includes>
       <useProjectArtifact>false</useProjectArtifact>
       <useTransitiveDependencies>false</useTransitiveDependencies>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/src/deb/control/control
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/src/deb/control/control b/zeppelin-distribution/src/deb/control/control
index a9d4222..507d151 100644
--- a/zeppelin-distribution/src/deb/control/control
+++ b/zeppelin-distribution/src/deb/control/control
@@ -1,3 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
 Package: [[deb.pkg.name]]
 Version: [[version]]-[[buildNumber]]
 Section: misc

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/src/deb/control/prerm
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/src/deb/control/prerm b/zeppelin-distribution/src/deb/control/prerm
index 447d708..85977fc 100644
--- a/zeppelin-distribution/src/deb/control/prerm
+++ b/zeppelin-distribution/src/deb/control/prerm
@@ -1,4 +1,22 @@
 #!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
 
 set -e
 
@@ -6,4 +24,4 @@ SERVICE=$(which service 2> /dev/null)
 RM=$(which rm 2> /dev/null)
 
 exec $SERVICE zeppelind stop
-exec $RM -rf [[deb.log.path]]/* [[deb.pid.path]]/*
\ No newline at end of file
+exec $RM -rf [[deb.log.path]]/* [[deb.pid.path]]/*

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-distribution/src/deb/init.d/zeppelind
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/src/deb/init.d/zeppelind b/zeppelin-distribution/src/deb/init.d/zeppelind
index dbb22c4..d9752df 100755
--- a/zeppelin-distribution/src/deb/init.d/zeppelind
+++ b/zeppelin-distribution/src/deb/init.d/zeppelind
@@ -1,5 +1,23 @@
 #!/bin/bash
 #
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
 ### BEGIN INIT INFO
 # Provides:          zeppelind
 # Required-Start:    $remote_fs

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/Makefile
----------------------------------------------------------------------
diff --git a/zeppelin-docs/Makefile b/zeppelin-docs/Makefile
deleted file mode 100644
index 7614d1a..0000000
--- a/zeppelin-docs/Makefile
+++ /dev/null
@@ -1,174 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-PAPER         =
-BUILDDIR      = target
-SOURCEDIR     = src/main/sphinx
-
-# User-friendly check for sphinx-build
-ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
-endif
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -W -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SOURCEDIR)
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SOURCEDIR)
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html       to make standalone HTML files"
-	@echo "  dirhtml    to make HTML files named index.html in directories"
-	@echo "  singlehtml to make a single large HTML file"
-	@echo "  pickle     to make pickle files"
-	@echo "  json       to make JSON files"
-	@echo "  htmlhelp   to make HTML files and a HTML help project"
-	@echo "  qthelp     to make HTML files and a qthelp project"
-	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
-	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
-	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
-	@echo "  text       to make text files"
-	@echo "  man        to make manual pages"
-	@echo "  texinfo    to make Texinfo files"
-	@echo "  info       to make Texinfo files and run them through makeinfo"
-	@echo "  gettext    to make PO message catalogs"
-	@echo "  changes    to make an overview of all changed/added/deprecated items"
-	@echo "  xml        to make Docutils-native XML files"
-	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
-	@echo "  linkcheck  to check all external links for integrity"
-	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
-
-clean:
-	rm -rf $(BUILDDIR)/*
-
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-
-
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
-	@echo "To view the help file:"
-	@echo "# devhelp"
-
-epub:
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
-	@echo "Run \`make' in that directory to run these through (pdf)latex" \
-	      "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through pdflatex..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-latexpdfja:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through platex and dvipdfmx..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
-	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
-	@echo
-	@echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
-	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
-	@echo
-	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-texinfo:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo
-	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
-	@echo "Run \`make' in that directory to run these through makeinfo" \
-	      "(use \`make info' here to do that automatically)."
-
-info:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo "Running Texinfo files through makeinfo..."
-	make -C $(BUILDDIR)/texinfo info
-	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-gettext:
-	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
-	@echo
-	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-changes:
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
-	@echo
-	@echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
-	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
-	@echo "Testing of doctests in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/doctest/output.txt."
-
-xml:
-	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
-	@echo
-	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
-
-pseudoxml:
-	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
-	@echo
-	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/README.md
----------------------------------------------------------------------
diff --git a/zeppelin-docs/README.md b/zeppelin-docs/README.md
deleted file mode 100644
index 01b9c93..0000000
--- a/zeppelin-docs/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-# Zeppelin documentation
-Build Zeppelin documentation
-
-## Get Sphinx-doc
- - Install [Sphinx](http://sphinx-doc.org/install.html)
-
-## Build
-```mvn package```
-Or
-```make html```
-
-Sphinx will create the folder "targert".
-
-### Build failed
- - make: sphinx-build: No such file or directory
-```
-Make sure sphinx is correctly installed
-Check the sphinx app name (can be sphinx-build27, in that case make a symbolic link)
-```
- - ValueError: unknown locale: UTF-8
-````
-export LC_ALL=en_US.UTF-8
-export LANG=en_US.UTF-8
-```

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-docs/pom.xml b/zeppelin-docs/pom.xml
deleted file mode 100644
index 9c43ada..0000000
--- a/zeppelin-docs/pom.xml
+++ /dev/null
@@ -1,87 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <artifactId>zeppelin</artifactId>
-    <groupId>com.nflabs.zeppelin</groupId>
-    <version>0.5.0-SNAPSHOT</version>
-  </parent>
-
-  <groupId>com.nflabs.zeppelin</groupId>
-  <artifactId>zeppelin-api-ui</artifactId>
-  <packaging>war</packaging>
-  <version>0.5.0-SNAPSHOT</version>
-  <name>Zeppelin: Documentation</name>
-  <url>http://www.nflabs.com</url>
-  
-  <!--
-      Genarate HTML docs quickly while developing:
-      - brew install python
-      - pip install sphinx
-    -->
-  
-  <build>
-    <finalName>zeppelin-api-ui</finalName>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>com.mycila</groupId>
-          <artifactId>license-maven-plugin</artifactId>
-          <configuration>
-            <excludes combine.children="append">
-              <exclude>**/*.conf</exclude>
-              <exclude>**/*.css_t</exclude>
-            </excludes>
-          </configuration>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-    
-    <plugins>
-      <!-- Swagger ui -->
-      <plugin>
-	<groupId>org.apache.maven.plugins</groupId>
-	<artifactId>maven-war-plugin</artifactId>
-	<configuration>
-	  <failOnMissingWebXml>false</failOnMissingWebXml>
-	  <webResources>
-	    <resource>
-	      <directory>src/main/swagger</directory>
-	    </resource>
-	  </webResources>
-	</configuration>
-      </plugin>
-      
-      <!-- Sphinx -->
-      <plugin>
-	<groupId>org.tomdz.maven</groupId>
-	<artifactId>sphinx-maven-plugin</artifactId>
-	<configuration>
-	  <fork>true</fork>
-	  <force>true</force>
-	  <warningsAsErrors>true</warningsAsErrors>
-	  <sourceDirectory>${project.basedir}/src/main/sphinx</sourceDirectory>
-	  <outputDirectory>${project.build.directory}/html</outputDirectory>
-	</configuration>
-	<executions>
-	  <execution>
-	    <phase>package</phase>
-	    <goals>
-	      <goal>generate</goal>
-	    </goals>
-	  </execution>
-	</executions>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-deploy-plugin</artifactId>
-        <version>2.7</version>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/index.rst
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/index.rst b/zeppelin-docs/src/main/sphinx/index.rst
deleted file mode 100644
index 9d5468e..0000000
--- a/zeppelin-docs/src/main/sphinx/index.rst
+++ /dev/null
@@ -1,21 +0,0 @@
-.. Zeppelin documentation master file, created by
-   sphinx-quickstart on Wed Jan 29 17:19:36 2014.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
-Zeppelin documentation
-====================================
-
-.. toctree::
-   :maxdepth: 4
-   :numbered: 2
-   :titlesonly:
-
-   installation
-
-Link
-====================================
-.. toctree::
-    :maxdepth: 1
-
-* :ref:`search`
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/installation.rst
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/installation.rst b/zeppelin-docs/src/main/sphinx/installation.rst
deleted file mode 100644
index 6ecc42d..0000000
--- a/zeppelin-docs/src/main/sphinx/installation.rst
+++ /dev/null
@@ -1,8 +0,0 @@
-************
-Installation
-************
-
-.. toctree::
-    :maxdepth: 1
-
-    installation/install
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/installation/install.rst
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/installation/install.rst b/zeppelin-docs/src/main/sphinx/installation/install.rst
deleted file mode 100644
index 5317e00..0000000
--- a/zeppelin-docs/src/main/sphinx/installation/install.rst
+++ /dev/null
@@ -1,101 +0,0 @@
-=================
-Install Zeppelin
-=================
-
-Instructions for the Impatient
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Install Zeppelin in local mode
-
-.. code-block:: bash
-
-  # this scripts install hadoop and zeppelin in current directory and start zeppelin in local mode
-  # download and unarchive hadoop distribution package
-  curl -O http://apache.mirror.cdnetworks.com/hadoop/common/hadoop-1.2.1/hadoop-1.2.1-bin.tar.gz
-  tar -xzf hadoop-1.2.1-bin.tar.gz
-
-  # download zeppelin and unarchive
-  curl -O https://s3-ap-northeast-1.amazonaws.com/zeppel.in/zeppelin-0.3.0.tar.gz
-  tar -xzf zeppelin-0.3.0.tar.gz
-
-  # set HADOOP_HOME
-  echo "export HADOOP_HOME=`pwd`/hadoop-1.2.1" >> zeppelin-0.3.0/conf/zeppelin-env.sh
-
-  # start zeppelin
-  ./zeppelin-0.3.0/bin/zeppelin-daemon.sh start
-
-You can access Zeppelin with browser http://localhost:8080
-
-Install
-^^^^^^^
-Configuring Zeppelin with existing hadoop cluster, refer this section.
-
-Prerequisites
--------------
-Java 1.6 or Later
-Apache Hadoop (Standalone mode)
-Download
-To get Zeppelin distribution, download a recent release.
-
-Install
--------
-Unpack the downloaded Zeppelin distribution.
-
-Configure
----------
-Configuration can be done by both environment variable and java properties. If both defined, environment vaiable is used.
-
-=========================    =======================  ============================== ===========
-zepplin-env.sh	             zepplin-site.xml         Default value  		     Description
-=========================    =======================  ============================== ===========
-ZEPPELIN_HOME	  		    		   	   		   	     Zeppelin Home directory
-ZEPPELIN_PORT         	     zeppelin.server.port     8080	   		     Zeppelin server port
-ZEPPELIN_JOB_DIR             zeppelin.job.dir         jobs	   		     Zeppelin persist/load session in this directory. Can be a path or a URI. location on HDFS supported
-ZEPPELIN_ZAN_REPO            zeppelin.zan.repo        https://github.com/NFLabs/zan  Remote ZAN repository URL
-ZEPPELIN_ZAN_LOCAL_REPO      zeppelin.zan.localrepo   zan-repo	 		     Zeppelin library local repository. Local filesystem path
-ZEPPELIN_ZAN_SHARED_REPO     zeppelin.zan.sharedrepo				     Zeppelin library shared repository. Location on HDFS. Usufull when your backend (eg. hiveserver) is not running on the sam machine and want to use zeppelin library with resource file(eg. in hive 'ADD FILE 'path'). So your backend can get resource file from shared repository.
-ZEPPELIN_DRIVERS             zeppelin.drivers         hive:hive2://,exec:exec://     Comma separated list of [Name]:[Connection URI]
-ZEPPELIN_DRIVER_DIR          zeppelin.driver.dir      drivers			     Zeppelin driver directory.
-=========================    =======================  ============================== ===========
-
-Configuring with existing Hive
--------------------------------
-If you have hive already installed in your hadoop cluster, just run hive server and make Zeppelin to connect it. There're two different version of hive servers, Hive Server1, Hive Server2. Make sure you have Hive server running.
-
-And then, add connection uri in zeppelin.drivers at zeppelin-site.xml If you have Hive Server 1 installed and running on host hiveserver1Address on port 10000, configuration property can be
-
-.. code-block:: bash
- 
- <property>
-   <name>zeppelin.drivers</name>
-   <value>hive:hive://hiveserver1Address:10000/default,exec:exec://</value>
-   <description>Comma separated driver configurations uri. </description>
- </property>
-
-If Hive Server 2 installed and running on host hiveserver2Address on port 10000, configuration will be
-
-.. code-block:: bash
-
-  <property>
-   <name>zeppelin.drivers</name>
-   <value>hive:hive2://hiveserver2Address:10000/default,exec:exec://</value>
-   <description>Comma separated driver configurations uri. </description>
-  </property>
-
-Start/Stop
-^^^^^^^^^^
-
-**Start Zeppelin**
-
-.. code-block:: bash
-
-  bin/zeppelin-daemon.sh start
-
-After successful start, visit http://localhost:8080 with your web browser
-
-**Stop Zeppelin**
-
-.. code-block:: bash
-
-  bin/zeppelin-daemon.sh stop
-

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/layout.html
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/layout.html b/zeppelin-docs/src/main/sphinx/templates/zeppelin/layout.html
deleted file mode 100644
index 3cbcded..0000000
--- a/zeppelin-docs/src/main/sphinx/templates/zeppelin/layout.html
+++ /dev/null
@@ -1,38 +0,0 @@
-{%- extends 'basic/layout.html' %}
-
-{% block relbar1 %}{% endblock %}
-{% block relbar2 %}{% endblock %}
-
-{% macro nav() %}
-<p class="nav">
-    <span class="left">
-        {%- if prev %}
-        &laquo; <a href="{{ prev.link|e }}">{{ prev.title }}</a>
-        {%- else %}
-        &nbsp;
-        {%- endif %}
-    </span>
-    <span class="right">
-        {%- if next %}
-        <a href="{{ next.link|e }}">{{ next.title }}</a> &raquo;
-        {%- endif %}
-    </span>
-</p>
-{% endmacro %}
-
-{% block content %}
-<div class="header">
-    <h1 class="heading"><a href="{{ pathto('index') }}">
-        <span>{{ shorttitle|e }}</span></a></h1>
-    <h2 class="heading"><span>{{ title|striptags|e }}</span></h2>
-</div>
-<div class="topnav">
-    {{ nav() }}
-</div>
-<div class="content">
-    {% block body %}{% endblock %}
-</div>
-<div class="bottomnav">
-    {{ nav() }}
-</div>
-{% endblock %}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_info_32.png
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_info_32.png b/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_info_32.png
deleted file mode 100644
index f0f5ebb..0000000
Binary files a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_info_32.png and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_warning_32.png
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_warning_32.png b/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_warning_32.png
deleted file mode 100644
index 114fa32..0000000
Binary files a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/alert_warning_32.png and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/bullet.png
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/bullet.png b/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/bullet.png
deleted file mode 100644
index f0c9386..0000000
Binary files a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/bullet.png and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/zeppelin.css_t
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/zeppelin.css_t b/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/zeppelin.css_t
deleted file mode 100644
index 44401e9..0000000
--- a/zeppelin-docs/src/main/sphinx/templates/zeppelin/static/zeppelin.css_t
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-@import url("haiku.css");
-@import url(http://fonts.googleapis.com/css?family=Anonymous+Pro);
-@import url(http://fonts.googleapis.com/css?family=Open+Sans:400italic,600italic,700italic,400,600,700);
-
-html {
-    background: #fff;
-    -webkit-font-smoothing: antialiased;
-}
-
-body {
-/*    font-family: 'Helvetica Neue', Helvetica, Arial, 'lucida grande', tahoma, verdana, arial, sans-serif;*/
-    font-family: 'Open Sans', sans-serif;
-    color: #222222;
-}
-
-div.header {
-    background-color: rgb(48, 113, 169);
-    background-image: url(presto.png);
-    background-position: 36px 50%;
-    background-repeat: no-repeat;
-    background-size: 57px 50px;
-    padding: 10px 40px 20px 120px;
-        color: #fff !important;
-}
-
-div.header h1 a {
-    color: #fff !important;
-}
-
-div.header h2 {
-    color: rgb(184, 184, 185);
-}
-
-.nav .right {
-    float: right;
-    text-align: right;
-}
-
-.highlight pre {
- background-color:white;
- font-family: 'Anonymous Pro', sans-serif;
-}
-
-pre {
-    padding: 0.8em;
-    border: 0;
-}
-
-div.topnav {
-    background: #eeeeee;
-}
-
-div.topnav,
-div.bottomnav {
-    padding: 10px 0;
-}
-
-div.topnav p,
-div.bottomnav p {
-    font-size: 1.0em;
-    margin: 0 40px;
-    text-align: left;
-}
-
-div.header h1 {
-    font-size: 1.8em;
-}
-
-h1 {
-    font-size: 1.5em;
-}
-
-h2 {
-    font-size: 1.4em;
-}
-
-h3 {
-    font-size: 1.3em;
-}
-
-h4 {
-    font-size: 1.2em;
-}
-
-div.content {
-    font-size: 1.0em;
-}
-
-div.content ul > li {
-    background-image: url(bullet.png);
-}
-
-div.admonition {
-    border-radius: 3px;
-    border-style: solid;
-}
-
-div.note {
-    background-color: #f5f5ff
-}
-
-tt {
-    font-family: Consolas, 'Liberation Mono', Courier, monospace;
-    font-size: 0.9em;
-}
-
-tt.descname {
-    font-size: 1.1em;
-}
-
-tt.docutils.literal {
-    margin: 0 2px;
-    padding: 0 5px;
-    border: 1px solid #ddd;
-    background-color: #f8f8f8;
-    border-radius: 3px;
-    white-space: nowrap;
-}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/669d408d/zeppelin-docs/src/main/sphinx/templates/zeppelin/theme.conf
----------------------------------------------------------------------
diff --git a/zeppelin-docs/src/main/sphinx/templates/zeppelin/theme.conf b/zeppelin-docs/src/main/sphinx/templates/zeppelin/theme.conf
deleted file mode 100644
index 726cae7..0000000
--- a/zeppelin-docs/src/main/sphinx/templates/zeppelin/theme.conf
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-[theme]
-inherit = haiku
-stylesheet = zeppelin.css
-
-[options]
-bodyfont = 'Open Sans', sans-serif
-
-full_logo         = false
-textcolor         = #ecf2fc
-headingcolor      = #374665
-
-linkcolor         = #3b5998
-visitedlinkcolor  = #3b5998
-hoverlinkcolor    = #3b5998